diff --git a/client/ayon_core/tools/publisher/models/publish.py b/client/ayon_core/tools/publisher/models/publish.py
index da7b64ceae..ef207bfb79 100644
--- a/client/ayon_core/tools/publisher/models/publish.py
+++ b/client/ayon_core/tools/publisher/models/publish.py
@@ -4,7 +4,7 @@
import traceback
import collections
from functools import partial
-from typing import Optional, Dict, List, Union, Any, Iterable, Literal
+from typing import Optional, Dict, List, Union, Any, Iterable
import arrow
import pyblish.plugin
@@ -22,15 +22,6 @@
# Define constant for plugin orders offset
PLUGIN_ORDER_OFFSET = 0.5
-ActionFilterType = Literal[
- "all",
- "notProcessed",
- "processed",
- "failed",
- "warning",
- "failedOrWarning",
- "succeeded"
-]
class PublishReportMaker:
@@ -318,8 +309,10 @@ class PublishPluginActionItem:
action_id (str): Action id.
plugin_id (str): Plugin id.
active (bool): Action is active.
- on_filter (ActionFilterType): Actions have 'on' attribute which define
- when can be action triggered (e.g. 'all', 'failed', ...).
+    on_filter (Literal["all", "notProcessed", "processed", "failed",
+        "warning", "failedOrWarning", "succeeded"]): Actions have an 'on'
+        attribute which defines when the action can be triggered
+        (e.g. 'all', 'failed', ...).
label (str): Action's label.
icon (Optional[str]) Action's icon.
"""
@@ -329,14 +322,14 @@ def __init__(
action_id: str,
plugin_id: str,
active: bool,
- on_filter: ActionFilterType,
+ on_filter: str,
label: str,
icon: Optional[str],
):
self.action_id: str = action_id
self.plugin_id: str = plugin_id
self.active: bool = active
- self.on_filter: ActionFilterType = on_filter
+ self.on_filter: str = on_filter
self.label: str = label
self.icon: Optional[str] = icon
diff --git a/server_addon/applications/client/ayon_applications/__init__.py b/server_addon/applications/client/ayon_applications/__init__.py
deleted file mode 100644
index 99d201e49b..0000000000
--- a/server_addon/applications/client/ayon_applications/__init__.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from .version import __version__
-from .constants import (
- APPLICATIONS_ADDON_ROOT,
- DEFAULT_ENV_SUBGROUP,
- PLATFORM_NAMES,
-)
-from .exceptions import (
- ApplicationNotFound,
- ApplicationExecutableNotFound,
- ApplicationLaunchFailed,
- MissingRequiredKey,
-)
-from .defs import (
- LaunchTypes,
- ApplicationExecutable,
- UndefinedApplicationExecutable,
- ApplicationGroup,
- Application,
- EnvironmentToolGroup,
- EnvironmentTool,
-)
-from .hooks import (
- LaunchHook,
- PreLaunchHook,
- PostLaunchHook,
-)
-from .manager import (
- ApplicationManager,
- ApplicationLaunchContext,
-)
-from .addon import ApplicationsAddon
-
-
-__all__ = (
- "__version__",
-
- "APPLICATIONS_ADDON_ROOT",
- "DEFAULT_ENV_SUBGROUP",
- "PLATFORM_NAMES",
-
- "ApplicationNotFound",
- "ApplicationExecutableNotFound",
- "ApplicationLaunchFailed",
- "MissingRequiredKey",
-
- "LaunchTypes",
- "ApplicationExecutable",
- "UndefinedApplicationExecutable",
- "ApplicationGroup",
- "Application",
- "EnvironmentToolGroup",
- "EnvironmentTool",
-
- "LaunchHook",
- "PreLaunchHook",
- "PostLaunchHook",
-
- "ApplicationManager",
- "ApplicationLaunchContext",
-
- "ApplicationsAddon",
-)
diff --git a/server_addon/applications/client/ayon_applications/action.py b/server_addon/applications/client/ayon_applications/action.py
deleted file mode 100644
index e5942c7008..0000000000
--- a/server_addon/applications/client/ayon_applications/action.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import copy
-
-import ayon_api
-
-from ayon_core import resources
-from ayon_core.lib import Logger, NestedCacheItem
-from ayon_core.settings import get_studio_settings, get_project_settings
-from ayon_core.pipeline.actions import LauncherAction
-
-from .exceptions import (
- ApplicationExecutableNotFound,
- ApplicationLaunchFailed,
-)
-
-
-class ApplicationAction(LauncherAction):
- """Action to launch an application.
-
- Application action based on 'ApplicationManager' system.
-
- Handling of applications in launcher is not ideal and should be completely
- redone from scratch. This is just a temporary solution to keep backwards
- compatibility with AYON launcher.
-
- Todos:
- Move handling of errors to frontend.
- """
-
- # Application object
- application = None
- # Action attributes
- name = None
- label = None
- label_variant = None
- group = None
- icon = None
- color = None
- order = 0
- data = {}
- project_settings = {}
- project_entities = {}
-
- _log = None
-
- # --- For compatibility for combinations of new and old ayon-core ---
- project_settings_cache = NestedCacheItem(
- levels=1, default_factory=dict, lifetime=20
- )
- project_entities_cache = NestedCacheItem(
- levels=1, default_factory=dict, lifetime=20
- )
-
- @classmethod
- def _app_get_project_settings(cls, selection):
- project_name = selection.project_name
- if project_name in ApplicationAction.project_settings:
- return ApplicationAction.project_settings[project_name]
-
- if hasattr(selection, "get_project_settings"):
- return selection.get_project_settings()
-
- cache = ApplicationAction.project_settings_cache[project_name]
- if not cache.is_valid:
- if project_name:
- settings = get_project_settings(project_name)
- else:
- settings = get_studio_settings()
- cache.update_data(settings)
- return copy.deepcopy(cache.get_data())
-
- @classmethod
- def _app_get_project_entity(cls, selection):
- project_name = selection.project_name
- if project_name in ApplicationAction.project_entities:
- return ApplicationAction.project_entities[project_name]
-
- if hasattr(selection, "get_project_settings"):
- return selection.get_project_entity()
-
- cache = ApplicationAction.project_entities_cache[project_name]
- if not cache.is_valid:
- project_entity = None
- if project_name:
- project_entity = ayon_api.get_project(project_name)
- cache.update_data(project_entity)
- return copy.deepcopy(cache.get_data())
-
- @property
- def log(self):
- if self._log is None:
- self._log = Logger.get_logger(self.__class__.__name__)
- return self._log
-
- def is_compatible(self, selection):
- if not selection.is_task_selected:
- return False
-
- project_entity = self._app_get_project_entity(selection)
- apps = project_entity["attrib"].get("applications")
- if not apps or self.application.full_name not in apps:
- return False
-
- project_settings = self._app_get_project_settings(selection)
- only_available = project_settings["applications"]["only_available"]
- if only_available and not self.application.find_executable():
- return False
- return True
-
- def _show_message_box(self, title, message, details=None):
- from qtpy import QtWidgets, QtGui
- from ayon_core import style
-
- dialog = QtWidgets.QMessageBox()
- icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
- dialog.setWindowIcon(icon)
- dialog.setStyleSheet(style.load_stylesheet())
- dialog.setWindowTitle(title)
- dialog.setText(message)
- if details:
- dialog.setDetailedText(details)
- dialog.exec_()
-
- def process(self, selection, **kwargs):
- """Process the full Application action"""
- try:
- self.application.launch(
- project_name=selection.project_name,
- folder_path=selection.folder_path,
- task_name=selection.task_name,
- **self.data
- )
-
- except ApplicationExecutableNotFound as exc:
- details = exc.details
- msg = exc.msg
- log_msg = str(msg)
- if details:
- log_msg += "\n" + details
- self.log.warning(log_msg)
- self._show_message_box(
- "Application executable not found", msg, details
- )
-
- except ApplicationLaunchFailed as exc:
- msg = str(exc)
- self.log.warning(msg, exc_info=True)
- self._show_message_box("Application launch failed", msg)
diff --git a/server_addon/applications/client/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py
deleted file mode 100644
index 26374ad0cd..0000000000
--- a/server_addon/applications/client/ayon_applications/addon.py
+++ /dev/null
@@ -1,321 +0,0 @@
-import os
-import json
-
-import ayon_api
-
-from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap
-
-from .version import __version__
-from .constants import APPLICATIONS_ADDON_ROOT
-from .defs import LaunchTypes
-from .manager import ApplicationManager
-
-
-class ApplicationsAddon(AYONAddon, IPluginPaths):
- name = "applications"
- version = __version__
-
- def initialize(self, settings):
- # TODO remove when addon is removed from ayon-core
- self.enabled = self.name in settings
-
- def get_app_environments_for_context(
- self,
- project_name,
- folder_path,
- task_name,
- full_app_name,
- env_group=None,
- launch_type=None,
- env=None,
- ):
- """Calculate environment variables for launch context.
-
- Args:
- project_name (str): Project name.
- folder_path (str): Folder path.
- task_name (str): Task name.
- full_app_name (str): Full application name.
- env_group (Optional[str]): Environment group.
- launch_type (Optional[str]): Launch type.
- env (Optional[dict[str, str]]): Environment variables to update.
-
- Returns:
- dict[str, str]: Environment variables for context.
-
- """
- from ayon_applications.utils import get_app_environments_for_context
-
- if not full_app_name:
- return {}
-
- return get_app_environments_for_context(
- project_name,
- folder_path,
- task_name,
- full_app_name,
- env_group=env_group,
- launch_type=launch_type,
- env=env,
- addons_manager=self.manager
- )
-
- def get_farm_publish_environment_variables(
- self,
- project_name,
- folder_path,
- task_name,
- full_app_name=None,
- env_group=None,
- ):
- """Calculate environment variables for farm publish.
-
- Args:
- project_name (str): Project name.
- folder_path (str): Folder path.
- task_name (str): Task name.
- env_group (Optional[str]): Environment group.
- full_app_name (Optional[str]): Full application name. Value from
- environment variable 'AYON_APP_NAME' is used if 'None' is
- passed.
-
- Returns:
- dict[str, str]: Environment variables for farm publish.
-
- """
- if full_app_name is None:
- full_app_name = os.getenv("AYON_APP_NAME")
-
- return self.get_app_environments_for_context(
- project_name,
- folder_path,
- task_name,
- full_app_name,
- env_group=env_group,
- launch_type=LaunchTypes.farm_publish
- )
-
- def get_applications_manager(self, settings=None):
- """Get applications manager.
-
- Args:
- settings (Optional[dict]): Studio/project settings.
-
- Returns:
- ApplicationManager: Applications manager.
-
- """
- return ApplicationManager(settings)
-
- def get_plugin_paths(self):
- return {
- "publish": [
- os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
- ]
- }
-
- def get_app_icon_path(self, icon_filename):
- """Get icon path.
-
- Args:
- icon_filename (str): Icon filename.
-
- Returns:
- Union[str, None]: Icon path or None if not found.
-
- """
- if not icon_filename:
- return None
- icon_name = os.path.basename(icon_filename)
- path = os.path.join(APPLICATIONS_ADDON_ROOT, "icons", icon_name)
- if os.path.exists(path):
- return path
- return None
-
- def get_app_icon_url(self, icon_filename, server=False):
- """Get icon path.
-
- Method does not validate if icon filename exist on server.
-
- Args:
- icon_filename (str): Icon name.
- server (Optional[bool]): Return url to AYON server.
-
- Returns:
- Union[str, None]: Icon path or None is server url is not
- available.
-
- """
- if not icon_filename:
- return None
- icon_name = os.path.basename(icon_filename)
- if server:
- base_url = ayon_api.get_base_url()
- return (
- f"{base_url}/addons/{self.name}/{self.version}"
- f"/public/icons/{icon_name}"
- )
- server_url = os.getenv("AYON_WEBSERVER_URL")
- if not server_url:
- return None
- return "/".join([
- server_url, "addons", self.name, self.version, "icons", icon_name
- ])
-
- def get_applications_action_classes(self):
- """Get application action classes for launcher tool.
-
- This method should be used only by launcher tool. Please do not use it
- in other places as its implementation is not optimal, and might
- change or be removed.
-
- Returns:
- list[ApplicationAction]: List of application action classes.
-
- """
- from .action import ApplicationAction
-
- actions = []
-
- manager = self.get_applications_manager()
- for full_name, application in manager.applications.items():
- if not application.enabled:
- continue
-
- icon = self.get_app_icon_path(application.icon)
-
- action = type(
- "app_{}".format(full_name),
- (ApplicationAction,),
- {
- "identifier": "application.{}".format(full_name),
- "application": application,
- "name": application.name,
- "label": application.group.label,
- "label_variant": application.label,
- "group": None,
- "icon": icon,
- "color": getattr(application, "color", None),
- "order": getattr(application, "order", None) or 0,
- "data": {}
- }
- )
- actions.append(action)
- return actions
-
- def launch_application(
- self, app_name, project_name, folder_path, task_name
- ):
- """Launch application.
-
- Args:
- app_name (str): Full application name e.g. 'maya/2024'.
- project_name (str): Project name.
- folder_path (str): Folder path.
- task_name (str): Task name.
-
- """
- app_manager = self.get_applications_manager()
- return app_manager.launch(
- app_name,
- project_name=project_name,
- folder_path=folder_path,
- task_name=task_name,
- )
-
- def webserver_initialization(self, manager):
- """Initialize webserver.
-
- Args:
- manager (WebServerManager): Webserver manager.
-
- """
- static_prefix = f"/addons/{self.name}/{self.version}/icons"
- manager.add_static(
- static_prefix, os.path.join(APPLICATIONS_ADDON_ROOT, "icons")
- )
-
- # --- CLI ---
- def cli(self, addon_click_group):
- main_group = click_wrap.group(
- self._cli_main, name=self.name, help="Applications addon"
- )
- (
- main_group.command(
- self._cli_extract_environments,
- name="extractenvironments",
- help=(
- "Extract environment variables for context into json file"
- )
- )
- .argument("output_json_path")
- .option("--project", help="Project name", default=None)
- .option("--folder", help="Folder path", default=None)
- .option("--task", help="Task name", default=None)
- .option("--app", help="Application name", default=None)
- .option(
- "--envgroup",
- help="Environment group (e.g. \"farm\")",
- default=None
- )
- )
- (
- main_group.command(
- self._cli_launch_applications,
- name="launch",
- help="Launch application"
- )
- .option("--app", required=True, help="Application name")
- .option("--project", required=True, help="Project name")
- .option("--folder", required=True, help="Folder path")
- .option("--task", required=True, help="Task name")
- )
- # Convert main command to click object and add it to parent group
- addon_click_group.add_command(
- main_group.to_click_obj()
- )
-
- def _cli_main(self):
- pass
-
- def _cli_extract_environments(
- self, output_json_path, project, folder, task, app, envgroup
- ):
- """Produces json file with environment based on project and app.
-
- Called by farm integration to propagate environment into farm jobs.
-
- Args:
- output_json_path (str): Output json file path.
- project (str): Project name.
- folder (str): Folder path.
- task (str): Task name.
- app (str): Full application name e.g. 'maya/2024'.
- envgroup (str): Environment group.
-
- """
- if all((project, folder, task, app)):
- env = self.get_farm_publish_environment_variables(
- project, folder, task, app, env_group=envgroup,
- )
- else:
- env = os.environ.copy()
-
- output_dir = os.path.dirname(output_json_path)
- if not os.path.exists(output_dir):
- os.makedirs(output_dir)
-
- with open(output_json_path, "w") as file_stream:
- json.dump(env, file_stream, indent=4)
-
- def _cli_launch_applications(self, project, folder, task, app):
- """Launch application.
-
- Args:
- project (str): Project name.
- folder (str): Folder path.
- task (str): Task name.
- app (str): Full application name e.g. 'maya/2024'.
-
- """
- self.launch_application(app, project, folder, task)
diff --git a/server_addon/applications/client/ayon_applications/constants.py b/server_addon/applications/client/ayon_applications/constants.py
deleted file mode 100644
index 92c8f4f254..0000000000
--- a/server_addon/applications/client/ayon_applications/constants.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import os
-
-APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
-
-PLATFORM_NAMES = {"windows", "linux", "darwin"}
-DEFAULT_ENV_SUBGROUP = "standard"
diff --git a/server_addon/applications/client/ayon_applications/defs.py b/server_addon/applications/client/ayon_applications/defs.py
deleted file mode 100644
index 5cc36041a1..0000000000
--- a/server_addon/applications/client/ayon_applications/defs.py
+++ /dev/null
@@ -1,404 +0,0 @@
-import os
-import platform
-import json
-import copy
-
-from ayon_core.lib import find_executable
-
-
-class LaunchTypes:
- """Launch types are filters for pre/post-launch hooks.
-
- Please use these variables in case they'll change values.
- """
-
- # Local launch - application is launched on local machine
- local = "local"
- # Farm render job - application is on farm
- farm_render = "farm-render"
- # Farm publish job - integration post-render job
- farm_publish = "farm-publish"
- # Remote launch - application is launched on remote machine from which
- # can be started publishing
- remote = "remote"
- # Automated launch - application is launched with automated publishing
- automated = "automated"
-
-
-class ApplicationExecutable:
- """Representation of executable loaded from settings."""
-
- def __init__(self, executable):
- # Try to format executable with environments
- try:
- executable = executable.format(**os.environ)
- except Exception:
- pass
-
- # On MacOS check if exists path to executable when ends with `.app`
- # - it is common that path will lead to "/Applications/Blender" but
- # real path is "/Applications/Blender.app"
- if platform.system().lower() == "darwin":
- executable = self.macos_executable_prep(executable)
-
- self.executable_path = executable
-
- def __str__(self):
- return self.executable_path
-
- def __repr__(self):
- return "<{}> {}".format(self.__class__.__name__, self.executable_path)
-
- @staticmethod
- def macos_executable_prep(executable):
- """Try to find full path to executable file.
-
- Real executable is stored in '*.app/Contents/MacOS/
Custom Frame Range
"
- "only used with 'Custom frame range' source"
- ),
- NumberDef(
- "custom_frameStart",
- label="Frame Start",
- default=frame_defaults["frameStart"],
- minimum=0,
- decimals=0,
- tooltip=(
- "Set the start frame for the export.\n"
- "Only used if frame range source is 'Custom frame range'."
- )
- ),
- NumberDef(
- "custom_frameEnd",
- label="Frame End",
- default=frame_defaults["frameEnd"],
- minimum=0,
- decimals=0,
- tooltip=(
- "Set the end frame for the export.\n"
- "Only used if frame range source is 'Custom frame range'."
- )
- ),
- NumberDef(
- "custom_handleStart",
- label="Handle Start",
- default=frame_defaults["handleStart"],
- minimum=0,
- decimals=0,
- tooltip=(
- "Set the start handles for the export, this will be "
- "added before the start frame.\n"
- "Only used if frame range source is 'Custom frame range'."
- )
- ),
- NumberDef(
- "custom_handleEnd",
- label="Handle End",
- default=frame_defaults["handleEnd"],
- minimum=0,
- decimals=0,
- tooltip=(
- "Set the end handles for the export, this will be added "
- "after the end frame.\n"
- "Only used if frame range source is 'Custom frame range'."
- )
- )
- ]
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/create/create_workfile.py b/server_addon/fusion/client/ayon_fusion/plugins/create/create_workfile.py
deleted file mode 100644
index 3dc14861df..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/create/create_workfile.py
+++ /dev/null
@@ -1,132 +0,0 @@
-import ayon_api
-
-from ayon_fusion.api import (
- get_current_comp
-)
-from ayon_core.pipeline import (
- AutoCreator,
- CreatedInstance,
-)
-
-
-class FusionWorkfileCreator(AutoCreator):
- identifier = "workfile"
- product_type = "workfile"
- label = "Workfile"
- icon = "fa5.file"
-
- default_variant = "Main"
-
- create_allow_context_change = False
-
- data_key = "openpype_workfile"
-
- def collect_instances(self):
-
- comp = get_current_comp()
- data = comp.GetData(self.data_key)
- if not data:
- return
-
- product_name = data.get("productName")
- if product_name is None:
- product_name = data["subset"]
- instance = CreatedInstance(
- product_type=self.product_type,
- product_name=product_name,
- data=data,
- creator=self
- )
- instance.transient_data["comp"] = comp
-
- self._add_instance_to_context(instance)
-
- def update_instances(self, update_list):
- for created_inst, _changes in update_list:
- comp = created_inst.transient_data["comp"]
- if not hasattr(comp, "SetData"):
- # Comp is not alive anymore, likely closed by the user
- self.log.error("Workfile comp not found for existing instance."
- " Comp might have been closed in the meantime.")
- continue
-
- # Imprint data into the comp
- data = created_inst.data_to_store()
- comp.SetData(self.data_key, data)
-
- def create(self, options=None):
- comp = get_current_comp()
- if not comp:
- self.log.error("Unable to find current comp")
- return
-
- existing_instance = None
- for instance in self.create_context.instances:
- if instance.product_type == self.product_type:
- existing_instance = instance
- break
-
- project_name = self.create_context.get_current_project_name()
- folder_path = self.create_context.get_current_folder_path()
- task_name = self.create_context.get_current_task_name()
- host_name = self.create_context.host_name
-
- existing_folder_path = None
- if existing_instance is not None:
- existing_folder_path = existing_instance["folderPath"]
-
- if existing_instance is None:
- folder_entity = ayon_api.get_folder_by_path(
- project_name, folder_path
- )
- task_entity = ayon_api.get_task_by_name(
- project_name, folder_entity["id"], task_name
- )
- product_name = self.get_product_name(
- project_name,
- folder_entity,
- task_entity,
- self.default_variant,
- host_name,
- )
- data = {
- "folderPath": folder_path,
- "task": task_name,
- "variant": self.default_variant,
- }
- data.update(self.get_dynamic_data(
- project_name,
- folder_entity,
- task_entity,
- self.default_variant,
- host_name,
- None
-
- ))
-
- new_instance = CreatedInstance(
- self.product_type, product_name, data, self
- )
- new_instance.transient_data["comp"] = comp
- self._add_instance_to_context(new_instance)
-
- elif (
- existing_folder_path != folder_path
- or existing_instance["task"] != task_name
- ):
- folder_entity = ayon_api.get_folder_by_path(
- project_name, folder_path
- )
- task_entity = ayon_api.get_task_by_name(
- project_name, folder_entity["id"], task_name
- )
- product_name = self.get_product_name(
- project_name,
- folder_entity,
- task_entity,
- self.default_variant,
- host_name,
- )
- existing_instance["folderPath"] = folder_path
- existing_instance["task"] = task_name
- existing_instance["productName"] = product_name
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/inventory/select_containers.py b/server_addon/fusion/client/ayon_fusion/plugins/inventory/select_containers.py
deleted file mode 100644
index e863c58ab3..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/inventory/select_containers.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from ayon_core.pipeline import InventoryAction
-
-
-class FusionSelectContainers(InventoryAction):
-
- label = "Select Containers"
- icon = "mouse-pointer"
- color = "#d8d8d8"
-
- def process(self, containers):
- from ayon_fusion.api import (
- get_current_comp,
- comp_lock_and_undo_chunk
- )
-
- tools = [i["_tool"] for i in containers]
-
- comp = get_current_comp()
- flow = comp.CurrentFrame.FlowView
-
- with comp_lock_and_undo_chunk(comp, self.label):
- # Clear selection
- flow.Select()
-
- # Select tool
- for tool in tools:
- flow.Select(tool)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/inventory/set_tool_color.py b/server_addon/fusion/client/ayon_fusion/plugins/inventory/set_tool_color.py
deleted file mode 100644
index 2c02afe32c..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/inventory/set_tool_color.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from qtpy import QtGui, QtWidgets
-
-from ayon_core.pipeline import InventoryAction
-from ayon_core import style
-from ayon_fusion.api import (
- get_current_comp,
- comp_lock_and_undo_chunk
-)
-
-
-class FusionSetToolColor(InventoryAction):
- """Update the color of the selected tools"""
-
- label = "Set Tool Color"
- icon = "plus"
- color = "#d8d8d8"
- _fallback_color = QtGui.QColor(1.0, 1.0, 1.0)
-
- def process(self, containers):
- """Color all selected tools the selected colors"""
-
- result = []
- comp = get_current_comp()
-
- # Get tool color
- first = containers[0]
- tool = first["_tool"]
- color = tool.TileColor
-
- if color is not None:
- qcolor = QtGui.QColor().fromRgbF(color["R"], color["G"], color["B"])
- else:
- qcolor = self._fallback_color
-
- # Launch pick color
- picked_color = self.get_color_picker(qcolor)
- if not picked_color:
- return
-
- with comp_lock_and_undo_chunk(comp):
- for container in containers:
- # Convert color to RGB 0-1 floats
- rgb_f = picked_color.getRgbF()
- rgb_f_table = {"R": rgb_f[0], "G": rgb_f[1], "B": rgb_f[2]}
-
- # Update tool
- tool = container["_tool"]
- tool.TileColor = rgb_f_table
-
- result.append(container)
-
- return result
-
- def get_color_picker(self, color):
- """Launch color picker and return chosen color
-
- Args:
- color(QtGui.QColor): Start color to display
-
- Returns:
- QtGui.QColor
-
- """
-
- color_dialog = QtWidgets.QColorDialog(color)
- color_dialog.setStyleSheet(style.load_stylesheet())
-
- accepted = color_dialog.exec_()
- if not accepted:
- return
-
- return color_dialog.selectedColor()
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/actions.py b/server_addon/fusion/client/ayon_fusion/plugins/load/actions.py
deleted file mode 100644
index dfa73e0b7a..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/actions.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""A module containing generic loader actions that will display in the Loader.
-
-"""
-
-from ayon_core.pipeline import load
-
-
-class FusionSetFrameRangeLoader(load.LoaderPlugin):
- """Set frame range excluding pre- and post-handles"""
-
- product_types = {
- "animation",
- "camera",
- "imagesequence",
- "render",
- "yeticache",
- "pointcache",
- "render",
- }
- representations = {"*"}
- extensions = {"*"}
-
- label = "Set frame range"
- order = 11
- icon = "clock-o"
- color = "white"
-
- def load(self, context, name, namespace, data):
-
- from ayon_fusion.api import lib
-
- version_attributes = context["version"]["attrib"]
-
- start = version_attributes.get("frameStart", None)
- end = version_attributes.get("frameEnd", None)
-
- if start is None or end is None:
- print("Skipping setting frame range because start or "
- "end frame data is missing..")
- return
-
- lib.update_frame_range(start, end)
-
-
-class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin):
- """Set frame range including pre- and post-handles"""
-
- product_types = {
- "animation",
- "camera",
- "imagesequence",
- "render",
- "yeticache",
- "pointcache",
- "render",
- }
- representations = {"*"}
-
- label = "Set frame range (with handles)"
- order = 12
- icon = "clock-o"
- color = "white"
-
- def load(self, context, name, namespace, data):
-
- from ayon_fusion.api import lib
-
- version_attributes = context["version"]["attrib"]
- start = version_attributes.get("frameStart", None)
- end = version_attributes.get("frameEnd", None)
-
- if start is None or end is None:
- print("Skipping setting frame range because start or "
- "end frame data is missing..")
- return
-
- # Include handles
- start -= version_attributes.get("handleStart", 0)
- end += version_attributes.get("handleEnd", 0)
-
- lib.update_frame_range(start, end)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/load_alembic.py b/server_addon/fusion/client/ayon_fusion/plugins/load/load_alembic.py
deleted file mode 100644
index 2e763b5330..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/load_alembic.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from ayon_core.pipeline import (
- load,
- get_representation_path,
-)
-from ayon_fusion.api import (
- imprint_container,
- get_current_comp,
- comp_lock_and_undo_chunk
-)
-
-
-class FusionLoadAlembicMesh(load.LoaderPlugin):
- """Load Alembic mesh into Fusion"""
-
- product_types = {"pointcache", "model"}
- representations = {"*"}
- extensions = {"abc"}
-
- label = "Load alembic mesh"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- tool_type = "SurfaceAlembicMesh"
-
- def load(self, context, name, namespace, data):
- # Fallback to folder name when namespace is None
- if namespace is None:
- namespace = context["folder"]["name"]
-
- # Create the Loader with the filename path set
- comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, "Create tool"):
-
- path = self.filepath_from_context(context)
-
- args = (-32768, -32768)
- tool = comp.AddTool(self.tool_type, *args)
- tool["Filename"] = path
-
- imprint_container(tool,
- name=name,
- namespace=namespace,
- context=context,
- loader=self.__class__.__name__)
-
- def switch(self, container, context):
- self.update(container, context)
-
- def update(self, container, context):
- """Update Alembic path"""
-
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- repre_entity = context["representation"]
- path = get_representation_path(repre_entity)
-
- with comp_lock_and_undo_chunk(comp, "Update tool"):
- tool["Filename"] = path
-
- # Update the imprinted representation
- tool.SetData("avalon.representation", repre_entity["id"])
-
- def remove(self, container):
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- with comp_lock_and_undo_chunk(comp, "Remove tool"):
- tool.Delete()
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/load_fbx.py b/server_addon/fusion/client/ayon_fusion/plugins/load/load_fbx.py
deleted file mode 100644
index a080fa3983..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/load_fbx.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from ayon_core.pipeline import (
- load,
- get_representation_path,
-)
-from ayon_fusion.api import (
- imprint_container,
- get_current_comp,
- comp_lock_and_undo_chunk,
-)
-
-
-class FusionLoadFBXMesh(load.LoaderPlugin):
- """Load FBX mesh into Fusion"""
-
- product_types = {"*"}
- representations = {"*"}
- extensions = {
- "3ds",
- "amc",
- "aoa",
- "asf",
- "bvh",
- "c3d",
- "dae",
- "dxf",
- "fbx",
- "htr",
- "mcd",
- "obj",
- "trc",
- }
-
- label = "Load FBX mesh"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- tool_type = "SurfaceFBXMesh"
-
- def load(self, context, name, namespace, data):
- # Fallback to folder name when namespace is None
- if namespace is None:
- namespace = context["folder"]["name"]
-
- # Create the Loader with the filename path set
- comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, "Create tool"):
- path = self.filepath_from_context(context)
-
- args = (-32768, -32768)
- tool = comp.AddTool(self.tool_type, *args)
- tool["ImportFile"] = path
-
- imprint_container(
- tool,
- name=name,
- namespace=namespace,
- context=context,
- loader=self.__class__.__name__,
- )
-
- def switch(self, container, context):
- self.update(container, context)
-
- def update(self, container, context):
- """Update path"""
-
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- repre_entity = context["representation"]
- path = get_representation_path(repre_entity)
-
- with comp_lock_and_undo_chunk(comp, "Update tool"):
- tool["ImportFile"] = path
-
- # Update the imprinted representation
- tool.SetData("avalon.representation", repre_entity["id"])
-
- def remove(self, container):
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- with comp_lock_and_undo_chunk(comp, "Remove tool"):
- tool.Delete()
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/load_sequence.py b/server_addon/fusion/client/ayon_fusion/plugins/load/load_sequence.py
deleted file mode 100644
index 233f1d7021..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/load_sequence.py
+++ /dev/null
@@ -1,291 +0,0 @@
-import contextlib
-
-import ayon_core.pipeline.load as load
-from ayon_fusion.api import (
- imprint_container,
- get_current_comp,
- comp_lock_and_undo_chunk,
-)
-from ayon_core.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
-
-comp = get_current_comp()
-
-
-@contextlib.contextmanager
-def preserve_inputs(tool, inputs):
- """Preserve the tool's inputs after context"""
-
- comp = tool.Comp()
-
- values = {}
- for name in inputs:
- tool_input = getattr(tool, name)
- value = tool_input[comp.TIME_UNDEFINED]
- values[name] = value
-
- try:
- yield
- finally:
- for name, value in values.items():
- tool_input = getattr(tool, name)
- tool_input[comp.TIME_UNDEFINED] = value
-
-
-@contextlib.contextmanager
-def preserve_trim(loader, log=None):
- """Preserve the relative trim of the Loader tool.
-
- This tries to preserve the loader's trim (trim in and trim out) after
- the context by reapplying the "amount" it trims on the clip's length at
- start and end.
-
- """
-
- # Get original trim as amount of "trimming" from length
- time = loader.Comp().TIME_UNDEFINED
- length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
- trim_from_start = loader["ClipTimeStart"][time]
- trim_from_end = length - loader["ClipTimeEnd"][time]
-
- try:
- yield
- finally:
- length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
- if trim_from_start > length:
- trim_from_start = length
- if log:
- log.warning(
- "Reducing trim in to %d "
- "(because of less frames)" % trim_from_start
- )
-
- remainder = length - trim_from_start
- if trim_from_end > remainder:
- trim_from_end = remainder
- if log:
- log.warning(
- "Reducing trim in to %d "
- "(because of less frames)" % trim_from_end
- )
-
- loader["ClipTimeStart"][time] = trim_from_start
- loader["ClipTimeEnd"][time] = length - trim_from_end
-
-
-def loader_shift(loader, frame, relative=True):
- """Shift global in time by i preserving duration
-
- This moves the loader by i frames preserving global duration. When relative
- is False it will shift the global in to the start frame.
-
- Args:
- loader (tool): The fusion loader tool.
- frame (int): The amount of frames to move.
- relative (bool): When True the shift is relative, else the shift will
- change the global in to frame.
-
- Returns:
- int: The resulting relative frame change (how much it moved)
-
- """
- comp = loader.Comp()
- time = comp.TIME_UNDEFINED
-
- old_in = loader["GlobalIn"][time]
- old_out = loader["GlobalOut"][time]
-
- if relative:
- shift = frame
- else:
- shift = frame - old_in
-
- if not shift:
- return 0
-
- # Shifting global in will try to automatically compensate for the change
- # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
- # input values to "just shift" the clip
- with preserve_inputs(
- loader,
- inputs=[
- "ClipTimeStart",
- "ClipTimeEnd",
- "HoldFirstFrame",
- "HoldLastFrame",
- ],
- ):
- # GlobalIn cannot be set past GlobalOut or vice versa
- # so we must apply them in the order of the shift.
- if shift > 0:
- loader["GlobalOut"][time] = old_out + shift
- loader["GlobalIn"][time] = old_in + shift
- else:
- loader["GlobalIn"][time] = old_in + shift
- loader["GlobalOut"][time] = old_out + shift
-
- return int(shift)
-
-
-class FusionLoadSequence(load.LoaderPlugin):
- """Load image sequence into Fusion"""
-
- product_types = {
- "imagesequence",
- "review",
- "render",
- "plate",
- "image",
- "online",
- }
- representations = {"*"}
- extensions = set(
- ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
- )
-
- label = "Load sequence"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def load(self, context, name, namespace, data):
- # Fallback to folder name when namespace is None
- if namespace is None:
- namespace = context["folder"]["name"]
-
- # Use the first file for now
- path = self.filepath_from_context(context)
-
- # Create the Loader with the filename path set
- comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, "Create Loader"):
- args = (-32768, -32768)
- tool = comp.AddTool("Loader", *args)
- tool["Clip"] = comp.ReverseMapPath(path)
-
- # Set global in point to start frame (if in version.data)
- start = self._get_start(context["version"], tool)
- loader_shift(tool, start, relative=False)
-
- imprint_container(
- tool,
- name=name,
- namespace=namespace,
- context=context,
- loader=self.__class__.__name__,
- )
-
- def switch(self, container, context):
- self.update(container, context)
-
- def update(self, container, context):
- """Update the Loader's path
-
- Fusion automatically tries to reset some variables when changing
- the loader's path to a new file. These automatic changes are to its
- inputs:
- - ClipTimeStart: Fusion reset to 0 if duration changes
- - We keep the trim in as close as possible to the previous value.
- When there are less frames then the amount of trim we reduce
- it accordingly.
-
- - ClipTimeEnd: Fusion reset to 0 if duration changes
- - We keep the trim out as close as possible to the previous value
- within new amount of frames after trim in (ClipTimeStart) has
- been set.
-
- - GlobalIn: Fusion reset to comp's global in if duration changes
- - We change it to the "frameStart"
-
- - GlobalEnd: Fusion resets to globalIn + length if duration changes
- - We do the same like Fusion - allow fusion to take control.
-
- - HoldFirstFrame: Fusion resets this to 0
- - We preserve the value.
-
- - HoldLastFrame: Fusion resets this to 0
- - We preserve the value.
-
- - Reverse: Fusion resets to disabled if "Loop" is not enabled.
- - We preserve the value.
-
- - Depth: Fusion resets to "Format"
- - We preserve the value.
-
- - KeyCode: Fusion resets to ""
- - We preserve the value.
-
- - TimeCodeOffset: Fusion resets to 0
- - We preserve the value.
-
- """
-
- tool = container["_tool"]
- assert tool.ID == "Loader", "Must be Loader"
- comp = tool.Comp()
-
- repre_entity = context["representation"]
- path = self.filepath_from_context(context)
-
- # Get start frame from version data
- start = self._get_start(context["version"], tool)
-
- with comp_lock_and_undo_chunk(comp, "Update Loader"):
- # Update the loader's path whilst preserving some values
- with preserve_trim(tool, log=self.log):
- with preserve_inputs(
- tool,
- inputs=(
- "HoldFirstFrame",
- "HoldLastFrame",
- "Reverse",
- "Depth",
- "KeyCode",
- "TimeCodeOffset",
- ),
- ):
- tool["Clip"] = comp.ReverseMapPath(path)
-
- # Set the global in to the start frame of the sequence
- global_in_changed = loader_shift(tool, start, relative=False)
- if global_in_changed:
- # Log this change to the user
- self.log.debug(
- "Changed '%s' global in: %d" % (tool.Name, start)
- )
-
- # Update the imprinted representation
- tool.SetData("avalon.representation", repre_entity["id"])
-
- def remove(self, container):
- tool = container["_tool"]
- assert tool.ID == "Loader", "Must be Loader"
- comp = tool.Comp()
-
- with comp_lock_and_undo_chunk(comp, "Remove Loader"):
- tool.Delete()
-
- def _get_start(self, version_entity, tool):
- """Return real start frame of published files (incl. handles)"""
- attributes = version_entity["attrib"]
-
- # Get start frame directly with handle if it's in data
- start = attributes.get("frameStartHandle")
- if start is not None:
- return start
-
- # Get frame start without handles
- start = attributes.get("frameStart")
- if start is None:
- self.log.warning(
- "Missing start frame for version "
- "assuming starts at frame 0 for: "
- "{}".format(tool.Name)
- )
- return 0
-
- # Use `handleStart` if the data is available
- handle_start = attributes.get("handleStart")
- if handle_start:
- start -= handle_start
-
- return start
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/load_usd.py b/server_addon/fusion/client/ayon_fusion/plugins/load/load_usd.py
deleted file mode 100644
index 42ce339faf..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/load_usd.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from ayon_core.pipeline import (
- load,
- get_representation_path,
-)
-from ayon_fusion.api import (
- imprint_container,
- get_current_comp,
- comp_lock_and_undo_chunk
-)
-from ayon_fusion.api.lib import get_fusion_module
-
-
-class FusionLoadUSD(load.LoaderPlugin):
- """Load USD into Fusion
-
- Support for USD was added since Fusion 18.5
- """
-
- product_types = {"*"}
- representations = {"*"}
- extensions = {"usd", "usda", "usdz"}
-
- label = "Load USD"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- tool_type = "uLoader"
-
- @classmethod
- def apply_settings(cls, project_settings):
- super(FusionLoadUSD, cls).apply_settings(project_settings)
- if cls.enabled:
- # Enable only in Fusion 18.5+
- fusion = get_fusion_module()
- version = fusion.GetVersion()
- major = version[1]
- minor = version[2]
- is_usd_supported = (major, minor) >= (18, 5)
- cls.enabled = is_usd_supported
-
- def load(self, context, name, namespace, data):
- # Fallback to folder name when namespace is None
- if namespace is None:
- namespace = context["folder"]["name"]
-
- # Create the Loader with the filename path set
- comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, "Create tool"):
-
- path = self.fname
-
- args = (-32768, -32768)
- tool = comp.AddTool(self.tool_type, *args)
- tool["Filename"] = path
-
- imprint_container(tool,
- name=name,
- namespace=namespace,
- context=context,
- loader=self.__class__.__name__)
-
- def switch(self, container, context):
- self.update(container, context)
-
- def update(self, container, context):
-
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- repre_entity = context["representation"]
- path = get_representation_path(repre_entity)
-
- with comp_lock_and_undo_chunk(comp, "Update tool"):
- tool["Filename"] = path
-
- # Update the imprinted representation
- tool.SetData("avalon.representation", repre_entity["id"])
-
- def remove(self, container):
- tool = container["_tool"]
- assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
- comp = tool.Comp()
-
- with comp_lock_and_undo_chunk(comp, "Remove tool"):
- tool.Delete()
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/load/load_workfile.py b/server_addon/fusion/client/ayon_fusion/plugins/load/load_workfile.py
deleted file mode 100644
index c728f6b4aa..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/load/load_workfile.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""Import workfiles into your current comp.
-As all imported nodes are free floating and will probably be changed there
-is no update or reload function added for this plugin
-"""
-
-from ayon_core.pipeline import load
-
-from ayon_fusion.api import (
- get_current_comp,
- get_bmd_library,
-)
-
-
-class FusionLoadWorkfile(load.LoaderPlugin):
- """Load the content of a workfile into Fusion"""
-
- product_types = {"workfile"}
- representations = {"*"}
- extensions = {"comp"}
-
- label = "Load Workfile"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def load(self, context, name, namespace, data):
- # Get needed elements
- bmd = get_bmd_library()
- comp = get_current_comp()
- path = self.filepath_from_context(context)
-
- # Paste the content of the file into the current comp
- comp.Paste(bmd.readfile(path))
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp.py
deleted file mode 100644
index 2e5bcd63db..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import pyblish.api
-
-from ayon_fusion.api import get_current_comp
-
-
-class CollectCurrentCompFusion(pyblish.api.ContextPlugin):
- """Collect current comp"""
-
- order = pyblish.api.CollectorOrder - 0.4
- label = "Collect Current Comp"
- hosts = ["fusion"]
-
- def process(self, context):
- """Collect all image sequence tools"""
-
- current_comp = get_current_comp()
- assert current_comp, "Must have active Fusion composition"
- context.data["currentComp"] = current_comp
-
- # Store path to current file
- filepath = current_comp.GetAttrs().get("COMPS_FileName", "")
- context.data['currentFile'] = filepath
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp_frame_range.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp_frame_range.py
deleted file mode 100644
index 24a9a92337..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_comp_frame_range.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import pyblish.api
-
-
-def get_comp_render_range(comp):
- """Return comp's start-end render range and global start-end range."""
- comp_attrs = comp.GetAttrs()
- start = comp_attrs["COMPN_RenderStart"]
- end = comp_attrs["COMPN_RenderEnd"]
- global_start = comp_attrs["COMPN_GlobalStart"]
- global_end = comp_attrs["COMPN_GlobalEnd"]
-
- # Whenever render ranges are undefined fall back
- # to the comp's global start and end
- if start == -1000000000:
- start = global_start
- if end == -1000000000:
- end = global_end
-
- return start, end, global_start, global_end
-
-
-class CollectFusionCompFrameRanges(pyblish.api.ContextPlugin):
- """Collect current comp"""
-
- # We run this after CollectorOrder - 0.1 otherwise it gets
- # overridden by global plug-in `CollectContextEntities`
- order = pyblish.api.CollectorOrder - 0.05
- label = "Collect Comp Frame Ranges"
- hosts = ["fusion"]
-
- def process(self, context):
- """Collect all image sequence tools"""
-
- comp = context.data["currentComp"]
-
- # Store comp render ranges
- start, end, global_start, global_end = get_comp_render_range(comp)
-
- context.data.update({
- "renderFrameStart": int(start),
- "renderFrameEnd": int(end),
- "compFrameStart": int(global_start),
- "compFrameEnd": int(global_end)
- })
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_inputs.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_inputs.py
deleted file mode 100644
index 002c0a5672..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_inputs.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline import registered_host
-
-
-def collect_input_containers(tools):
- """Collect containers that contain any of the node in `nodes`.
-
- This will return any loaded Avalon container that contains at least one of
- the nodes. As such, the Avalon container is an input for it. Or in short,
- there are member nodes of that container.
-
- Returns:
- list: Input avalon containers
-
- """
-
- # Lookup by node ids
- lookup = frozenset([tool.Name for tool in tools])
-
- containers = []
- host = registered_host()
- for container in host.ls():
-
- name = container["_tool"].Name
-
- # We currently assume no "groups" as containers but just single tools
- # like a single "Loader" operator. As such we just check whether the
- # Loader is part of the processing queue.
- if name in lookup:
- containers.append(container)
-
- return containers
-
-
-def iter_upstream(tool):
- """Yields all upstream inputs for the current tool.
-
- Yields:
- tool: The input tools.
-
- """
-
- def get_connected_input_tools(tool):
- """Helper function that returns connected input tools for a tool."""
- inputs = []
-
- # Filter only to actual types that will have sensible upstream
- # connections. So we ignore just "Number" inputs as they can be
- # many to iterate, slowing things down quite a bit - and in practice
- # they don't have upstream connections.
- VALID_INPUT_TYPES = ['Image', 'Particles', 'Mask', 'DataType3D']
- for type_ in VALID_INPUT_TYPES:
- for input_ in tool.GetInputList(type_).values():
- output = input_.GetConnectedOutput()
- if output:
- input_tool = output.GetTool()
- inputs.append(input_tool)
-
- return inputs
-
- # Initialize process queue with the node's inputs itself
- queue = get_connected_input_tools(tool)
-
- # We keep track of which node names we have processed so far, to ensure we
- # don't process the same hierarchy again. We are not pushing the tool
- # itself into the set as that doesn't correctly recognize the same tool.
- # Since tool names are unique in a comp in Fusion we rely on that.
- collected = set(tool.Name for tool in queue)
-
- # Traverse upstream references for all nodes and yield them as we
- # process the queue.
- while queue:
- upstream_tool = queue.pop()
- yield upstream_tool
-
- # Find upstream tools that are not collected yet.
- upstream_inputs = get_connected_input_tools(upstream_tool)
- upstream_inputs = [t for t in upstream_inputs if
- t.Name not in collected]
-
- queue.extend(upstream_inputs)
- collected.update(tool.Name for tool in upstream_inputs)
-
-
-class CollectUpstreamInputs(pyblish.api.InstancePlugin):
- """Collect source input containers used for this publish.
-
- This will include `inputs` data of which loaded publishes were used in the
- generation of this publish. This leaves an upstream trace to what was used
- as input.
-
- """
-
- label = "Collect Inputs"
- order = pyblish.api.CollectorOrder + 0.2
- hosts = ["fusion"]
- families = ["render", "image"]
-
- def process(self, instance):
-
- # Get all upstream and include itself
- if not any(instance[:]):
- self.log.debug("No tool found in instance, skipping..")
- return
-
- tool = instance[0]
- nodes = list(iter_upstream(tool))
- nodes.append(tool)
-
- # Collect containers for the given set of nodes
- containers = collect_input_containers(nodes)
-
- inputs = [c["representation"] for c in containers]
- instance.data["inputRepresentations"] = inputs
- self.log.debug("Collected inputs: %s" % inputs)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_instances.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_instances.py
deleted file mode 100644
index 921c282877..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_instances.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import pyblish.api
-
-
-class CollectInstanceData(pyblish.api.InstancePlugin):
- """Collect Fusion saver instances
-
- This additionally stores the Comp start and end render range in the
- current context's data as "frameStart" and "frameEnd".
-
- """
-
- order = pyblish.api.CollectorOrder
- label = "Collect Instances Data"
- hosts = ["fusion"]
-
- def process(self, instance):
- """Collect all image sequence tools"""
-
- context = instance.context
-
- # Include creator attributes directly as instance data
- creator_attributes = instance.data["creator_attributes"]
- instance.data.update(creator_attributes)
-
- frame_range_source = creator_attributes.get("frame_range_source")
- instance.data["frame_range_source"] = frame_range_source
-
- # get folder frame ranges to all instances
- # render product type instances `current_folder` render target
- start = context.data["frameStart"]
- end = context.data["frameEnd"]
- handle_start = context.data["handleStart"]
- handle_end = context.data["handleEnd"]
- start_with_handle = start - handle_start
- end_with_handle = end + handle_end
-
- # conditions for render product type instances
- if frame_range_source == "render_range":
- # set comp render frame ranges
- start = context.data["renderFrameStart"]
- end = context.data["renderFrameEnd"]
- handle_start = 0
- handle_end = 0
- start_with_handle = start
- end_with_handle = end
-
- if frame_range_source == "comp_range":
- comp_start = context.data["compFrameStart"]
- comp_end = context.data["compFrameEnd"]
- render_start = context.data["renderFrameStart"]
- render_end = context.data["renderFrameEnd"]
- # set comp frame ranges
- start = render_start
- end = render_end
- handle_start = render_start - comp_start
- handle_end = comp_end - render_end
- start_with_handle = comp_start
- end_with_handle = comp_end
-
- if frame_range_source == "custom_range":
- start = int(instance.data["custom_frameStart"])
- end = int(instance.data["custom_frameEnd"])
- handle_start = int(instance.data["custom_handleStart"])
- handle_end = int(instance.data["custom_handleEnd"])
- start_with_handle = start - handle_start
- end_with_handle = end + handle_end
-
- frame = instance.data["creator_attributes"].get("frame")
- # explicitly publishing only single frame
- if frame is not None:
- frame = int(frame)
-
- start = frame
- end = frame
- handle_start = 0
- handle_end = 0
- start_with_handle = frame
- end_with_handle = frame
-
- # Include start and end render frame in label
- product_name = instance.data["productName"]
- label = (
- "{product_name} ({start}-{end}) [{handle_start}-{handle_end}]"
- ).format(
- product_name=product_name,
- start=int(start),
- end=int(end),
- handle_start=int(handle_start),
- handle_end=int(handle_end)
- )
-
- instance.data.update({
- "label": label,
-
- # todo: Allow custom frame range per instance
- "frameStart": start,
- "frameEnd": end,
- "frameStartHandle": start_with_handle,
- "frameEndHandle": end_with_handle,
- "handleStart": handle_start,
- "handleEnd": handle_end,
- "fps": context.data["fps"],
- })
-
- # Add review family if the instance is marked as 'review'
- # This could be done through a 'review' Creator attribute.
- if instance.data.get("review", False):
- self.log.debug("Adding review family..")
- instance.data["families"].append("review")
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_render.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_render.py
deleted file mode 100644
index af52aee861..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_render.py
+++ /dev/null
@@ -1,208 +0,0 @@
-import os
-import attr
-import pyblish.api
-
-from ayon_core.pipeline import publish
-from ayon_core.pipeline.publish import RenderInstance
-from ayon_fusion.api.lib import get_frame_path
-
-
-@attr.s
-class FusionRenderInstance(RenderInstance):
- # extend generic, composition name is needed
- fps = attr.ib(default=None)
- projectEntity = attr.ib(default=None)
- stagingDir = attr.ib(default=None)
- app_version = attr.ib(default=None)
- tool = attr.ib(default=None)
- workfileComp = attr.ib(default=None)
- publish_attributes = attr.ib(default={})
- frameStartHandle = attr.ib(default=None)
- frameEndHandle = attr.ib(default=None)
-
-
-class CollectFusionRender(
- publish.AbstractCollectRender,
- publish.ColormanagedPyblishPluginMixin
-):
-
- order = pyblish.api.CollectorOrder + 0.09
- label = "Collect Fusion Render"
- hosts = ["fusion"]
-
- def get_instances(self, context):
-
- comp = context.data.get("currentComp")
- comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat")
- aspect_x = comp_frame_format_prefs["AspectX"]
- aspect_y = comp_frame_format_prefs["AspectY"]
-
-
- current_file = context.data["currentFile"]
- version = context.data["version"]
-
- project_entity = context.data["projectEntity"]
-
- instances = []
- for inst in context:
- if not inst.data.get("active", True):
- continue
-
- product_type = inst.data["productType"]
- if product_type not in ["render", "image"]:
- continue
-
- task_name = inst.data["task"]
- tool = inst.data["transientData"]["tool"]
-
- instance_families = inst.data.get("families", [])
- product_name = inst.data["productName"]
- instance = FusionRenderInstance(
- tool=tool,
- workfileComp=comp,
- productType=product_type,
- family=product_type,
- families=instance_families,
- version=version,
- time="",
- source=current_file,
- label=inst.data["label"],
- productName=product_name,
- folderPath=inst.data["folderPath"],
- task=task_name,
- attachTo=False,
- setMembers='',
- publish=True,
- name=product_name,
- resolutionWidth=comp_frame_format_prefs.get("Width"),
- resolutionHeight=comp_frame_format_prefs.get("Height"),
- pixelAspect=aspect_x / aspect_y,
- tileRendering=False,
- tilesX=0,
- tilesY=0,
- review="review" in instance_families,
- frameStart=inst.data["frameStart"],
- frameEnd=inst.data["frameEnd"],
- handleStart=inst.data["handleStart"],
- handleEnd=inst.data["handleEnd"],
- frameStartHandle=inst.data["frameStartHandle"],
- frameEndHandle=inst.data["frameEndHandle"],
- frameStep=1,
- fps=comp_frame_format_prefs.get("Rate"),
- app_version=comp.GetApp().Version,
- publish_attributes=inst.data.get("publish_attributes", {}),
-
- # The source instance this render instance replaces
- source_instance=inst
- )
-
- render_target = inst.data["creator_attributes"]["render_target"]
-
- # Add render target family
- render_target_family = f"render.{render_target}"
- if render_target_family not in instance.families:
- instance.families.append(render_target_family)
-
- # Add render target specific data
- if render_target in {"local", "frames"}:
- instance.projectEntity = project_entity
-
- if render_target == "farm":
- fam = "render.farm"
- if fam not in instance.families:
- instance.families.append(fam)
- instance.farm = True # to skip integrate
- if "review" in instance.families:
- # to skip ExtractReview locally
- instance.families.remove("review")
- instance.deadline = inst.data.get("deadline")
-
- instances.append(instance)
-
- return instances
-
- def post_collecting_action(self):
- for instance in self._context:
- if "render.frames" in instance.data.get("families", []):
- # adding representation data to the instance
- self._update_for_frames(instance)
-
- def get_expected_files(self, render_instance):
- """
-        Return the list of rendered files that should be created by
-        Deadline. These are not published directly; they are the source
-        for the later 'submit_publish_job'.
-
- Args:
- render_instance (RenderInstance): to pull anatomy and parts used
- in url
-
- Returns:
- (list) of absolute urls to rendered file
- """
- start = render_instance.frameStart - render_instance.handleStart
- end = render_instance.frameEnd + render_instance.handleEnd
-
- comp = render_instance.workfileComp
- path = comp.MapPath(
- render_instance.tool["Clip"][
- render_instance.workfileComp.TIME_UNDEFINED
- ]
- )
- output_dir = os.path.dirname(path)
- render_instance.outputDir = output_dir
-
- basename = os.path.basename(path)
-
- head, padding, ext = get_frame_path(basename)
-
- expected_files = []
- for frame in range(start, end + 1):
- expected_files.append(
- os.path.join(
- output_dir,
- f"{head}{str(frame).zfill(padding)}{ext}"
- )
- )
-
- return expected_files
-
- def _update_for_frames(self, instance):
- """Updating instance for render.frames family
-
- Adding representation data to the instance. Also setting
- colorspaceData to the representation based on file rules.
- """
-
- expected_files = instance.data["expectedFiles"]
-
- start = instance.data["frameStart"] - instance.data["handleStart"]
-
- path = expected_files[0]
- basename = os.path.basename(path)
- staging_dir = os.path.dirname(path)
- _, padding, ext = get_frame_path(basename)
-
- repre = {
- "name": ext[1:],
- "ext": ext[1:],
- "frameStart": f"%0{padding}d" % start,
- "files": [os.path.basename(f) for f in expected_files],
- "stagingDir": staging_dir,
- }
-
- self.set_representation_colorspace(
- representation=repre,
- context=instance.context,
- )
-
- # review representation
- if instance.data.get("review", False):
- repre["tags"] = ["review"]
-
- # add the repre to the instance
- if "representations" not in instance.data:
- instance.data["representations"] = []
- instance.data["representations"].append(repre)
-
- return instance
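
Note on the expected-files logic above: `get_expected_files` splits the saver's
clip path into a head, a zero-padding width and an extension, then builds one
absolute path per frame across the handle-inclusive range. A minimal,
Fusion-free sketch of that expansion (the `output_dir`, `head`, `padding` and
`ext` values are illustrative; in the plugin they come from `get_frame_path`
and the saver's Clip path):

    import os

    def expand_expected_files(output_dir, head, padding, ext, start, end):
        """Build the per-frame file paths a saver is expected to write."""
        return [
            os.path.join(output_dir, f"{head}{str(frame).zfill(padding)}{ext}")
            for frame in range(start, end + 1)
        ]

    # expand_expected_files("/renders", "beauty.", 4, ".exr", 1001, 1003)
    # -> ["/renders/beauty.1001.exr", "/renders/beauty.1002.exr",
    #     "/renders/beauty.1003.exr"]
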
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_workfile.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_workfile.py
deleted file mode 100644
index 4c288edb3e..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/collect_workfile.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-
-import pyblish.api
-
-
-class CollectFusionWorkfile(pyblish.api.InstancePlugin):
- """Collect Fusion workfile representation."""
-
- order = pyblish.api.CollectorOrder + 0.1
- label = "Collect Workfile"
- hosts = ["fusion"]
- families = ["workfile"]
-
- def process(self, instance):
-
- current_file = instance.context.data["currentFile"]
-
- folder, file = os.path.split(current_file)
- filename, ext = os.path.splitext(file)
-
- instance.data['representations'] = [{
- 'name': ext.lstrip("."),
- 'ext': ext.lstrip("."),
- 'files': file,
- "stagingDir": folder,
- }]
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/extract_render_local.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/extract_render_local.py
deleted file mode 100644
index bbcba5366d..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/extract_render_local.py
+++ /dev/null
@@ -1,207 +0,0 @@
-import os
-import logging
-import contextlib
-import collections
-import pyblish.api
-
-from ayon_core.pipeline import publish
-from ayon_fusion.api import comp_lock_and_undo_chunk
-from ayon_fusion.api.lib import get_frame_path, maintained_comp_range
-
-log = logging.getLogger(__name__)
-
-
-@contextlib.contextmanager
-def enabled_savers(comp, savers):
- """Enable only the `savers` in Comp during the context.
-
- Any Saver tool in the passed composition that is not in the savers list
- will be set to passthrough during the context.
-
- Args:
- comp (object): Fusion composition object.
- savers (list): List of Saver tool objects.
-
- """
- passthrough_key = "TOOLB_PassThrough"
- original_states = {}
- enabled_saver_names = {saver.Name for saver in savers}
-
- all_savers = comp.GetToolList(False, "Saver").values()
- savers_by_name = {saver.Name: saver for saver in all_savers}
-
- try:
- for saver in all_savers:
- original_state = saver.GetAttrs()[passthrough_key]
- original_states[saver.Name] = original_state
-
- # The passthrough state we want to set (passthrough != enabled)
- state = saver.Name not in enabled_saver_names
- if state != original_state:
- saver.SetAttrs({passthrough_key: state})
- yield
- finally:
- for saver_name, original_state in original_states.items():
- saver = savers_by_name[saver_name]
- saver.SetAttrs({"TOOLB_PassThrough": original_state})
-
-
-class FusionRenderLocal(
- pyblish.api.InstancePlugin,
- publish.ColormanagedPyblishPluginMixin
-):
- """Render the current Fusion composition locally."""
-
- order = pyblish.api.ExtractorOrder - 0.2
- label = "Render Local"
- hosts = ["fusion"]
- families = ["render.local"]
-
- is_rendered_key = "_fusionrenderlocal_has_rendered"
-
- def process(self, instance):
-
- # Start render
- result = self.render(instance)
- if result is False:
- raise RuntimeError(f"Comp render failed for {instance}")
-
- self._add_representation(instance)
-
- # Log render status
- self.log.info(
- "Rendered '{}' for folder '{}' under the task '{}'".format(
- instance.data["name"],
- instance.data["folderPath"],
- instance.data["task"],
- )
- )
-
- def render(self, instance):
- """Render instance.
-
-        We try to render as few times as possible by combining the instances
-        that share a frame range into one Fusion render. Then, for the batch
-        of instances, we store whether the render succeeded or failed.
-
- """
-
- if self.is_rendered_key in instance.data:
- # This instance was already processed in batch with another
- # instance, so we just return the render result directly
- self.log.debug(f"Instance {instance} was already rendered")
- return instance.data[self.is_rendered_key]
-
- instances_by_frame_range = self.get_render_instances_by_frame_range(
- instance.context
- )
-
- # Render matching batch of instances that share the same frame range
- frame_range = self.get_instance_render_frame_range(instance)
- render_instances = instances_by_frame_range[frame_range]
-
- # We initialize render state false to indicate it wasn't successful
- # yet to keep track of whether Fusion succeeded. This is for cases
- # where an error below this might cause the comp render result not
- # to be stored for the instances of this batch
- for render_instance in render_instances:
- render_instance.data[self.is_rendered_key] = False
-
- savers_to_render = [inst.data["tool"] for inst in render_instances]
- current_comp = instance.context.data["currentComp"]
- frame_start, frame_end = frame_range
-
- self.log.info(
- f"Starting Fusion render frame range {frame_start}-{frame_end}"
- )
- saver_names = ", ".join(saver.Name for saver in savers_to_render)
- self.log.info(f"Rendering tools: {saver_names}")
-
- with comp_lock_and_undo_chunk(current_comp):
- with maintained_comp_range(current_comp):
- with enabled_savers(current_comp, savers_to_render):
- result = current_comp.Render(
- {
- "Start": frame_start,
- "End": frame_end,
- "Wait": True,
- }
- )
-
- # Store the render state for all the rendered instances
- for render_instance in render_instances:
- render_instance.data[self.is_rendered_key] = bool(result)
-
- return result
-
- def _add_representation(self, instance):
- """Add representation to instance"""
-
- expected_files = instance.data["expectedFiles"]
-
- start = instance.data["frameStart"] - instance.data["handleStart"]
-
- path = expected_files[0]
- _, padding, ext = get_frame_path(path)
-
- staging_dir = os.path.dirname(path)
-
- files = [os.path.basename(f) for f in expected_files]
- if len(expected_files) == 1:
- files = files[0]
-
- repre = {
- "name": ext[1:],
- "ext": ext[1:],
- "frameStart": f"%0{padding}d" % start,
- "files": files,
- "stagingDir": staging_dir,
- }
-
- self.set_representation_colorspace(
- representation=repre,
- context=instance.context,
- )
-
- # review representation
- if instance.data.get("review", False):
- repre["tags"] = ["review"]
-
- # add the repre to the instance
- if "representations" not in instance.data:
- instance.data["representations"] = []
- instance.data["representations"].append(repre)
-
- return instance
-
- def get_render_instances_by_frame_range(self, context):
- """Return enabled render.local instances grouped by their frame range.
-
- Arguments:
- context (pyblish.Context): The pyblish context
-
- Returns:
- dict: (start, end): instances mapping
-
- """
-
- instances_to_render = [
- instance for instance in context if
- # Only active instances
- instance.data.get("publish", True) and
- # Only render.local instances
- "render.local" in instance.data.get("families", [])
- ]
-
- # Instances by frame ranges
- instances_by_frame_range = collections.defaultdict(list)
- for instance in instances_to_render:
- start, end = self.get_instance_render_frame_range(instance)
- instances_by_frame_range[(start, end)].append(instance)
-
- return dict(instances_by_frame_range)
-
- def get_instance_render_frame_range(self, instance):
- start = instance.data["frameStartHandle"]
- end = instance.data["frameEndHandle"]
- return start, end
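
The extractor above batches 'render.local' instances so Fusion renders each
frame range only once. A host-agnostic sketch of that grouping step, using
plain dicts in place of pyblish instances (the key names mirror the plugin;
everything else is illustrative):

    import collections

    def group_by_frame_range(instances):
        """Group instances that share (frameStartHandle, frameEndHandle)."""
        grouped = collections.defaultdict(list)
        for instance in instances:
            key = (instance["frameStartHandle"], instance["frameEndHandle"])
            grouped[key].append(instance)
        return dict(grouped)

    # Two savers covering 1001-1050 end up in one batch and are rendered in a
    # single comp.Render() call; a saver covering 1001-1010 gets its own batch.
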
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/increment_current_file.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/increment_current_file.py
deleted file mode 100644
index bcff27b988..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/increment_current_file.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline import OptionalPyblishPluginMixin
-from ayon_core.pipeline import KnownPublishError
-
-
-class FusionIncrementCurrentFile(
- pyblish.api.ContextPlugin, OptionalPyblishPluginMixin
-):
- """Increment the current file.
-
- Saves the current file with an increased version number.
-
- """
-
- label = "Increment workfile version"
- order = pyblish.api.IntegratorOrder + 9.0
- hosts = ["fusion"]
- optional = True
-
- def process(self, context):
- if not self.is_active(context.data):
- return
-
- from ayon_core.lib import version_up
- from ayon_core.pipeline.publish import get_errored_plugins_from_context
-
- errored_plugins = get_errored_plugins_from_context(context)
- if any(
- plugin.__name__ == "FusionSubmitDeadline"
- for plugin in errored_plugins
- ):
- raise KnownPublishError(
- "Skipping incrementing current file because "
- "submission to render farm failed."
- )
-
- comp = context.data.get("currentComp")
- assert comp, "Must have comp"
-
- current_filepath = context.data["currentFile"]
- new_filepath = version_up(current_filepath)
-
- comp.Save(new_filepath)
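
The plugin above relies on `ayon_core.lib.version_up` to bump the workfile
name before saving. A rough standalone equivalent, assuming a `_v###` token in
the file name (the real helper handles more naming conventions):

    import re

    def version_up_filename(path):
        """Increment the last zero-padded '_v###' token in a file path."""
        def bump(match):
            number = int(match.group(1)) + 1
            return "_v{:0{}d}".format(number, len(match.group(1)))

        new_path, count = re.subn(r"_v(\d+)(?!.*_v\d+)", bump, path)
        if not count:
            raise ValueError(f"No version token found in: {path}")
        return new_path

    # version_up_filename("shot010_comp_v012.comp") -> "shot010_comp_v013.comp"
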
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/save_scene.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/save_scene.py
deleted file mode 100644
index da9b6ce41f..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/save_scene.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import pyblish.api
-
-
-class FusionSaveComp(pyblish.api.ContextPlugin):
- """Save current comp"""
-
- label = "Save current file"
- order = pyblish.api.ExtractorOrder - 0.49
- hosts = ["fusion"]
- families = ["render", "image", "workfile"]
-
- def process(self, context):
-
- comp = context.data.get("currentComp")
- assert comp, "Must have comp"
-
- current = comp.GetAttrs().get("COMPS_FileName", "")
- assert context.data['currentFile'] == current
-
- self.log.info("Saving current file: {}".format(current))
- comp.Save()
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_background_depth.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_background_depth.py
deleted file mode 100644
index 90b6b110a4..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_background_depth.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline import (
- publish,
- OptionalPyblishPluginMixin,
- PublishValidationError,
-)
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateBackgroundDepth(
- pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
-):
- """Validate if all Background tool are set to float32 bit"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Background Depth 32 bit"
- hosts = ["fusion"]
- families = ["render", "image"]
- optional = True
-
- actions = [SelectInvalidAction, publish.RepairAction]
-
- @classmethod
- def get_invalid(cls, instance):
- context = instance.context
- comp = context.data.get("currentComp")
- assert comp, "Must have Comp object"
-
- backgrounds = comp.GetToolList(False, "Background").values()
- if not backgrounds:
- return []
-
- return [i for i in backgrounds if i.GetInput("Depth") != 4.0]
-
- def process(self, instance):
- if not self.is_active(instance.data):
- return
-
- invalid = self.get_invalid(instance)
- if invalid:
- raise PublishValidationError(
- "Found {} Backgrounds tools which"
- " are not set to float32".format(len(invalid)),
- title=self.label,
- )
-
- @classmethod
- def repair(cls, instance):
- comp = instance.context.data.get("currentComp")
- invalid = cls.get_invalid(instance)
- for i in invalid:
- i.SetInput("Depth", 4.0, comp.TIME_UNDEFINED)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_comp_saved.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_comp_saved.py
deleted file mode 100644
index ba56c40b65..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_comp_saved.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-
-class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
- """Ensure current comp is saved"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Comp Saved"
- families = ["render", "image"]
- hosts = ["fusion"]
-
- def process(self, context):
-
- comp = context.data.get("currentComp")
- assert comp, "Must have Comp object"
- attrs = comp.GetAttrs()
-
- filename = attrs["COMPS_FileName"]
- if not filename:
- raise PublishValidationError("Comp is not saved.",
- title=self.label)
-
- if not os.path.exists(filename):
- raise PublishValidationError(
- "Comp file does not exist: %s" % filename, title=self.label)
-
- if attrs["COMPB_Modified"]:
- self.log.warning("Comp is modified. Save your comp to ensure your "
- "changes propagate correctly.")
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_create_folder_checked.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_create_folder_checked.py
deleted file mode 100644
index 1b910123f0..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_create_folder_checked.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline.publish import RepairAction
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
- """Valid if all savers have the input attribute CreateDir checked on
-
- This attribute ensures that the folders to which the saver will write
- will be created.
- """
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Create Folder Checked"
- families = ["render", "image"]
- hosts = ["fusion"]
- actions = [RepairAction, SelectInvalidAction]
-
- @classmethod
- def get_invalid(cls, instance):
- tool = instance.data["tool"]
- create_dir = tool.GetInput("CreateDir")
- if create_dir == 0.0:
- cls.log.error(
- "%s has Create Folder turned off" % instance[0].Name
- )
- return [tool]
-
- def process(self, instance):
- invalid = self.get_invalid(instance)
- if invalid:
- raise PublishValidationError(
- "Found Saver with Create Folder During Render checked off",
- title=self.label,
- )
-
- @classmethod
- def repair(cls, instance):
- invalid = cls.get_invalid(instance)
- for tool in invalid:
- tool.SetInput("CreateDir", 1.0)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_expected_frames_existence.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_expected_frames_existence.py
deleted file mode 100644
index 6dc9642581..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_expected_frames_existence.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import os
-import pyblish.api
-
-from ayon_core.pipeline.publish import RepairAction
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateLocalFramesExistence(pyblish.api.InstancePlugin):
- """Checks if files for savers that's set
- to publish expected frames exists
- """
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Expected Frames Exists"
- families = ["render.frames"]
- hosts = ["fusion"]
- actions = [RepairAction, SelectInvalidAction]
-
- @classmethod
- def get_invalid(cls, instance, non_existing_frames=None):
- if non_existing_frames is None:
- non_existing_frames = []
-
- tool = instance.data["tool"]
-
- expected_files = instance.data["expectedFiles"]
-
- for file in expected_files:
- if not os.path.exists(file):
- cls.log.error(
- f"Missing file: {file}"
- )
- non_existing_frames.append(file)
-
- if len(non_existing_frames) > 0:
- cls.log.error(f"Some of {tool.Name}'s files does not exist")
- return [tool]
-
- def process(self, instance):
- non_existing_frames = []
- invalid = self.get_invalid(instance, non_existing_frames)
- if invalid:
- raise PublishValidationError(
- "{} is set to publish existing frames but "
- "some frames are missing. "
- "The missing file(s) are:\n\n{}".format(
- invalid[0].Name,
- "\n\n".join(non_existing_frames),
- ),
- title=self.label,
- )
-
- @classmethod
- def repair(cls, instance):
- invalid = cls.get_invalid(instance)
- if invalid:
- tool = instance.data["tool"]
- # Change render target to local to render locally
- tool.SetData("openpype.creator_attributes.render_target", "local")
-
- cls.log.info(
- f"Reload the publisher and {tool.Name} "
- "will be set to render locally"
- )
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_filename_has_extension.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_filename_has_extension.py
deleted file mode 100644
index 471c0ca31a..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_filename_has_extension.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
- """Ensure the Saver has an extension in the filename path
-
- This disallows files written as `filename` instead of `filename.frame.ext`.
- Fusion does not always set an extension for your filename when
- changing the file format of the saver.
-
- """
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Filename Has Extension"
- families = ["render", "image"]
- hosts = ["fusion"]
- actions = [SelectInvalidAction]
-
- def process(self, instance):
- invalid = self.get_invalid(instance)
- if invalid:
- raise PublishValidationError("Found Saver without an extension",
- title=self.label)
-
- @classmethod
- def get_invalid(cls, instance):
-
- path = instance.data["expectedFiles"][0]
- fname, ext = os.path.splitext(path)
-
- if not ext:
- tool = instance.data["tool"]
- cls.log.error("%s has no extension specified" % tool.Name)
- return [tool]
-
- return []
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_image_frame.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_image_frame.py
deleted file mode 100644
index 70e5ed9279..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_image_frame.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline import PublishValidationError
-
-
-class ValidateImageFrame(pyblish.api.InstancePlugin):
- """Validates that `image` product type contains only single frame."""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Image Frame"
- families = ["image"]
- hosts = ["fusion"]
-
- def process(self, instance):
- render_start = instance.data["frameStartHandle"]
- render_end = instance.data["frameEndHandle"]
- too_many_frames = (isinstance(instance.data["expectedFiles"], list)
- and len(instance.data["expectedFiles"]) > 1)
-
- if render_end - render_start > 0 or too_many_frames:
- desc = ("Trying to render multiple frames. 'image' product type "
- "is meant for single frame. Please use 'render' creator.")
- raise PublishValidationError(
- title="Frame range outside of comp range",
- message=desc,
- description=desc
- )
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_frame_range.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_frame_range.py
deleted file mode 100644
index 0f7ef1862d..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_frame_range.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline import PublishValidationError
-
-
-class ValidateInstanceFrameRange(pyblish.api.InstancePlugin):
- """Validate instance frame range is within comp's global render range."""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Frame Range"
- families = ["render", "image"]
- hosts = ["fusion"]
-
- def process(self, instance):
-
- context = instance.context
- global_start = context.data["compFrameStart"]
- global_end = context.data["compFrameEnd"]
-
- render_start = instance.data["frameStartHandle"]
- render_end = instance.data["frameEndHandle"]
-
- if render_start < global_start or render_end > global_end:
-
- message = (
- f"Instance {instance} render frame range "
- f"({render_start}-{render_end}) is outside of the comp's "
- f"global render range ({global_start}-{global_end}) and thus "
- f"can't be rendered. "
- )
- description = (
- f"{message}\n\n"
- f"Either update the comp's global range or the instance's "
- f"frame range to ensure the comp's frame range includes the "
- f"to render frame range for the instance."
- )
- raise PublishValidationError(
- title="Frame range outside of comp range",
- message=message,
- description=description
- )
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_in_context.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_in_context.py
deleted file mode 100644
index 7b8b70b2fb..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_instance_in_context.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Validate if instance context is the same as publish context."""
-
-import pyblish.api
-from ayon_fusion.api.action import SelectToolAction
-from ayon_core.pipeline.publish import (
- RepairAction,
- ValidateContentsOrder,
- PublishValidationError,
- OptionalPyblishPluginMixin
-)
-
-
-class ValidateInstanceInContextFusion(pyblish.api.InstancePlugin,
- OptionalPyblishPluginMixin):
- """Validator to check if instance context matches context of publish.
-
- When working in per-shot style you always publish data in context of
- current asset (shot). This validator checks if this is so. It is optional
- so it can be disabled when needed.
- """
- # Similar to maya and houdini-equivalent `ValidateInstanceInContext`
-
- order = ValidateContentsOrder
- label = "Instance in same Context"
- optional = True
- hosts = ["fusion"]
- actions = [SelectToolAction, RepairAction]
-
- def process(self, instance):
- if not self.is_active(instance.data):
- return
-
- instance_context = self.get_context(instance.data)
- context = self.get_context(instance.context.data)
- if instance_context != context:
- context_label = "{} > {}".format(*context)
- instance_label = "{} > {}".format(*instance_context)
-
- raise PublishValidationError(
- message=(
- "Instance '{}' publishes to different asset than current "
- "context: {}. Current context: {}".format(
- instance.name, instance_label, context_label
- )
- ),
- description=(
- "## Publishing to a different asset\n"
- "There are publish instances present which are publishing "
- "into a different asset than your current context.\n\n"
- "Usually this is not what you want but there can be cases "
- "where you might want to publish into another asset or "
- "shot. If that's the case you can disable the validation "
- "on the instance to ignore it."
- )
- )
-
- @classmethod
- def repair(cls, instance):
-
- create_context = instance.context.data["create_context"]
- instance_id = instance.data.get("instance_id")
- created_instance = create_context.get_instance_by_id(
- instance_id
- )
- if created_instance is None:
- raise RuntimeError(
- f"No CreatedInstances found with id '{instance_id} "
- f"in {create_context.instances_by_id}"
- )
-
- context_asset, context_task = cls.get_context(instance.context.data)
- created_instance["folderPath"] = context_asset
- created_instance["task"] = context_task
- create_context.save_changes()
-
- @staticmethod
- def get_context(data):
- """Return asset, task from publishing context data"""
- return data["folderPath"], data["task"]
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_has_input.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_has_input.py
deleted file mode 100644
index de2cd1d862..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_has_input.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateSaverHasInput(pyblish.api.InstancePlugin):
- """Validate saver has incoming connection
-
- This ensures a Saver has at least an input connection.
-
- """
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Saver Has Input"
- families = ["render", "image"]
- hosts = ["fusion"]
- actions = [SelectInvalidAction]
-
- @classmethod
- def get_invalid(cls, instance):
-
- saver = instance.data["tool"]
- if not saver.Input.GetConnectedOutput():
- return [saver]
-
- return []
-
- def process(self, instance):
- invalid = self.get_invalid(instance)
- if invalid:
- saver_name = invalid[0].Name
- raise PublishValidationError(
- "Saver has no incoming connection: {} ({})".format(instance,
- saver_name),
- title=self.label)
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_passthrough.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_passthrough.py
deleted file mode 100644
index caa17168bc..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_passthrough.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
- """Validate saver passthrough is similar to Pyblish publish state"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Saver Passthrough"
- families = ["render", "image"]
- hosts = ["fusion"]
- actions = [SelectInvalidAction]
-
- def process(self, context):
-
- # Workaround for ContextPlugin always running, even if no instance
- # is present with the family
- instances = pyblish.api.instances_by_plugin(instances=list(context),
- plugin=self)
- if not instances:
- self.log.debug("Ignoring plugin.. (bugfix)")
-
- invalid_instances = []
- for instance in instances:
- invalid = self.is_invalid(instance)
- if invalid:
- invalid_instances.append(instance)
-
- if invalid_instances:
- self.log.info("Reset pyblish to collect your current scene state, "
- "that should fix error.")
- raise PublishValidationError(
- "Invalid instances: {0}".format(invalid_instances),
- title=self.label)
-
- def is_invalid(self, instance):
-
- saver = instance.data["tool"]
- attr = saver.GetAttrs()
- active = not attr["TOOLB_PassThrough"]
-
- if active != instance.data.get("publish", True):
- self.log.info("Saver has different passthrough state than "
- "Pyblish: {} ({})".format(instance, saver.Name))
- return [saver]
-
- return []
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_resolution.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_resolution.py
deleted file mode 100644
index 15d96a9afc..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_saver_resolution.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import pyblish.api
-from ayon_core.pipeline import (
- PublishValidationError,
- OptionalPyblishPluginMixin,
-)
-
-from ayon_fusion.api.action import SelectInvalidAction
-from ayon_fusion.api import comp_lock_and_undo_chunk
-
-
-class ValidateSaverResolution(
- pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
-):
- """Validate that the saver input resolution matches the folder resolution"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Folder Resolution"
- families = ["render", "image"]
- hosts = ["fusion"]
- optional = True
- actions = [SelectInvalidAction]
-
- def process(self, instance):
- if not self.is_active(instance.data):
- return
-
- resolution = self.get_resolution(instance)
- expected_resolution = self.get_expected_resolution(instance)
- if resolution != expected_resolution:
- raise PublishValidationError(
- "The input's resolution does not match "
- "the folder's resolution {}x{}.\n\n"
- "The input's resolution is {}x{}.".format(
- expected_resolution[0], expected_resolution[1],
- resolution[0], resolution[1]
- )
- )
-
- @classmethod
- def get_invalid(cls, instance):
- saver = instance.data["tool"]
- try:
- resolution = cls.get_resolution(instance)
- except PublishValidationError:
- resolution = None
- expected_resolution = cls.get_expected_resolution(instance)
- if resolution != expected_resolution:
- return [saver]
-
- @classmethod
- def get_resolution(cls, instance):
- saver = instance.data["tool"]
- first_frame = instance.data["frameStartHandle"]
- return cls.get_tool_resolution(saver, frame=first_frame)
-
- @classmethod
- def get_expected_resolution(cls, instance):
- attributes = instance.data["folderEntity"]["attrib"]
- return attributes["resolutionWidth"], attributes["resolutionHeight"]
-
- @classmethod
- def get_tool_resolution(cls, tool, frame):
- """Return the 2D input resolution to a Fusion tool
-
-        If the current tool hasn't been rendered, its input resolution
-        hasn't been saved. To work around this, an expression is set in
-        the Comments field to read the resolution.
-
-        Args:
-            tool (Fusion Tool): The tool to query the input resolution of.
- frame (int): The frame to query the resolution on.
-
- Returns:
- tuple: width, height as 2-tuple of integers
-
- """
- comp = tool.Composition
-
- # False undo removes the undo-stack from the undo list
- with comp_lock_and_undo_chunk(comp, "Read resolution", False):
- # Save old comment
- old_comment = ""
- has_expression = False
-
- if tool["Comments"][frame] not in ["", None]:
- if tool["Comments"].GetExpression() is not None:
- has_expression = True
- old_comment = tool["Comments"].GetExpression()
- tool["Comments"].SetExpression(None)
- else:
- old_comment = tool["Comments"][frame]
- tool["Comments"][frame] = ""
- # Get input width
- tool["Comments"].SetExpression("self.Input.OriginalWidth")
- if tool["Comments"][frame] is None:
- raise PublishValidationError(
- "Cannot get resolution info for frame '{}'.\n\n "
- "Please check that saver has connected input.".format(
- frame
- )
- )
-
- width = int(tool["Comments"][frame])
-
- # Get input height
- tool["Comments"].SetExpression("self.Input.OriginalHeight")
- height = int(tool["Comments"][frame])
-
- # Reset old comment
- tool["Comments"].SetExpression(None)
- if has_expression:
- tool["Comments"].SetExpression(old_comment)
- else:
- tool["Comments"][frame] = old_comment
-
- return width, height
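
The validator above compares the saver's input resolution against the folder
attributes. A condensed sketch of just the comparison, assuming a
`folderEntity`-style attrib dict as used in `get_expected_resolution` (the
values are illustrative):

    def resolution_mismatch(saver_resolution, folder_attrib):
        """Return an error message when resolutions differ, else None."""
        expected = (
            folder_attrib["resolutionWidth"],
            folder_attrib["resolutionHeight"],
        )
        if tuple(saver_resolution) != expected:
            return (
                "The input's resolution {}x{} does not match the folder's "
                "resolution {}x{}.".format(*saver_resolution, *expected)
            )
        return None

    # resolution_mismatch((1920, 1080),
    #                     {"resolutionWidth": 3840, "resolutionHeight": 2160})
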
diff --git a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_unique_subsets.py b/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_unique_subsets.py
deleted file mode 100644
index dd7df54da5..0000000000
--- a/server_addon/fusion/client/ayon_fusion/plugins/publish/validate_unique_subsets.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from collections import defaultdict
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-from ayon_fusion.api.action import SelectInvalidAction
-
-
-class ValidateUniqueSubsets(pyblish.api.ContextPlugin):
- """Ensure all instances have a unique product name"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Unique Products"
- families = ["render", "image"]
- hosts = ["fusion"]
- actions = [SelectInvalidAction]
-
- @classmethod
- def get_invalid(cls, context):
-
- # Collect instances per product per folder
- instances_per_product_folder = defaultdict(lambda: defaultdict(list))
- for instance in context:
- folder_path = instance.data["folderPath"]
- product_name = instance.data["productName"]
- instances_per_product_folder[folder_path][product_name].append(
- instance
- )
-
-        # Find which folder + product combination has more than one instance
- # Those are considered invalid because they'd integrate to the same
- # destination.
- invalid = []
- for folder_path, instances_per_product in (
- instances_per_product_folder.items()
- ):
- for product_name, instances in instances_per_product.items():
- if len(instances) > 1:
- cls.log.warning(
- (
- "{folder_path} > {product_name} used by more than "
- "one instance: {instances}"
- ).format(
- folder_path=folder_path,
- product_name=product_name,
- instances=instances
- )
- )
- invalid.extend(instances)
-
- # Return tools for the invalid instances so they can be selected
- invalid = [instance.data["tool"] for instance in invalid]
-
- return invalid
-
- def process(self, context):
- invalid = self.get_invalid(context)
- if invalid:
- raise PublishValidationError(
- "Multiple instances are set to the same folder > product.",
- title=self.label
- )
diff --git a/server_addon/fusion/client/ayon_fusion/scripts/__init__.py b/server_addon/fusion/client/ayon_fusion/scripts/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/server_addon/fusion/client/ayon_fusion/scripts/duplicate_with_inputs.py b/server_addon/fusion/client/ayon_fusion/scripts/duplicate_with_inputs.py
deleted file mode 100644
index 78edb1b3ba..0000000000
--- a/server_addon/fusion/client/ayon_fusion/scripts/duplicate_with_inputs.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from ayon_fusion.api import (
- comp_lock_and_undo_chunk,
- get_current_comp
-)
-
-
-def is_connected(input):
- """Return whether an input has incoming connection"""
- return input.GetAttrs()["INPB_Connected"]
-
-
-def duplicate_with_input_connections():
- """Duplicate selected tools with incoming connections."""
-
- comp = get_current_comp()
- original_tools = comp.GetToolList(True).values()
- if not original_tools:
- return # nothing selected
-
- with comp_lock_and_undo_chunk(
- comp, "Duplicate With Input Connections"):
-
- # Generate duplicates
- comp.Copy()
- comp.SetActiveTool()
- comp.Paste()
- duplicate_tools = comp.GetToolList(True).values()
-
- # Copy connections
- for original, new in zip(original_tools, duplicate_tools):
-
- original_inputs = original.GetInputList().values()
- new_inputs = new.GetInputList().values()
- assert len(original_inputs) == len(new_inputs)
-
- for original_input, new_input in zip(original_inputs, new_inputs):
-
- if is_connected(original_input):
-
- if is_connected(new_input):
- # Already connected if it is between the copied tools
- continue
-
- new_input.ConnectTo(original_input.GetConnectedOutput())
- assert is_connected(new_input), "Must be connected now"
diff --git a/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.py b/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.py
deleted file mode 100644
index b1ce7fe248..0000000000
--- a/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-from functools import partial
-
-from . import converters, exceptions, filters, setters, validators
-from ._cmp import cmp_using
-from ._config import get_run_validators, set_run_validators
-from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
-from ._make import (
- NOTHING,
- Attribute,
- Factory,
- attrib,
- attrs,
- fields,
- fields_dict,
- make_class,
- validate,
-)
-from ._version_info import VersionInfo
-
-
-__version__ = "21.2.0"
-__version_info__ = VersionInfo._from_version_string(__version__)
-
-__title__ = "attrs"
-__description__ = "Classes Without Boilerplate"
-__url__ = "https://www.attrs.org/"
-__uri__ = __url__
-__doc__ = __description__ + " <" + __uri__ + ">"
-
-__author__ = "Hynek Schlawack"
-__email__ = "hs@ox.cx"
-
-__license__ = "MIT"
-__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
-
-
-s = attributes = attrs
-ib = attr = attrib
-dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
-
-__all__ = [
- "Attribute",
- "Factory",
- "NOTHING",
- "asdict",
- "assoc",
- "astuple",
- "attr",
- "attrib",
- "attributes",
- "attrs",
- "cmp_using",
- "converters",
- "evolve",
- "exceptions",
- "fields",
- "fields_dict",
- "filters",
- "get_run_validators",
- "has",
- "ib",
- "make_class",
- "resolve_types",
- "s",
- "set_run_validators",
- "setters",
- "validate",
- "validators",
-]
-
-if sys.version_info[:2] >= (3, 6):
- from ._next_gen import define, field, frozen, mutable
-
- __all__.extend((define, field, frozen, mutable))
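
The vendored `attr` package above is what powers `@attr.s` classes such as
`FusionRenderInstance` earlier in this diff. A minimal sketch of the pattern
(the class and field names here are illustrative):

    import attr

    @attr.s
    class RenderBatch:
        # attr.ib() turns these class-level assignments into __init__
        # keyword arguments with the given defaults.
        fps = attr.ib(default=None)
        frameStartHandle = attr.ib(default=None)
        frameEndHandle = attr.ib(default=None)
        # factory=dict gives each instance its own dict, unlike a shared
        # mutable default such as default={}.
        publish_attributes = attr.ib(factory=dict)

    batch = RenderBatch(fps=24.0, frameStartHandle=1001, frameEndHandle=1050)
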
diff --git a/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.pyi b/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.pyi
deleted file mode 100644
index 3503b073b4..0000000000
--- a/server_addon/fusion/client/ayon_fusion/vendor/attr/__init__.pyi
+++ /dev/null
@@ -1,475 +0,0 @@
-import sys
-
-from typing import (
- Any,
- Callable,
- Dict,
- Generic,
- List,
- Mapping,
- Optional,
- Sequence,
- Tuple,
- Type,
- TypeVar,
- Union,
- overload,
-)
-
-# `import X as X` is required to make these public
-from . import converters as converters
-from . import exceptions as exceptions
-from . import filters as filters
-from . import setters as setters
-from . import validators as validators
-from ._version_info import VersionInfo
-
-
-__version__: str
-__version_info__: VersionInfo
-__title__: str
-__description__: str
-__url__: str
-__uri__: str
-__author__: str
-__email__: str
-__license__: str
-__copyright__: str
-
-_T = TypeVar("_T")
-_C = TypeVar("_C", bound=type)
-
-_EqOrderType = Union[bool, Callable[[Any], Any]]
-_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
-_ConverterType = Callable[[Any], Any]
-_FilterType = Callable[[Attribute[_T], _T], bool]
-_ReprType = Callable[[Any], str]
-_ReprArgType = Union[bool, _ReprType]
-_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
-_OnSetAttrArgType = Union[
- _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
-]
-_FieldTransformer = Callable[[type, List[Attribute[Any]]], List[Attribute[Any]]]
-# FIXME: in reality, if multiple validators are passed they must be in a list
-# or tuple, but those are invariant and so would prevent subtypes of
-# _ValidatorType from working when passed in a list or tuple.
-_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
-
-# _make --
-
-NOTHING: object
-
-# NOTE: Factory lies about its return type to make this possible:
-# `x: List[int] # = Factory(list)`
-# Work around mypy issue #4554 in the common case by using an overload.
-if sys.version_info >= (3, 8):
- from typing import Literal
-
- @overload
- def Factory(factory: Callable[[], _T]) -> _T: ...
- @overload
- def Factory(
- factory: Callable[[Any], _T],
- takes_self: Literal[True],
- ) -> _T: ...
- @overload
- def Factory(
- factory: Callable[[], _T],
- takes_self: Literal[False],
- ) -> _T: ...
-else:
- @overload
- def Factory(factory: Callable[[], _T]) -> _T: ...
- @overload
- def Factory(
- factory: Union[Callable[[Any], _T], Callable[[], _T]],
- takes_self: bool = ...,
- ) -> _T: ...
-
-# Static type inference support via __dataclass_transform__ implemented as per:
-# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
-# This annotation must be applied to all overloads of "define" and "attrs"
-#
-# NOTE: This is a typing construct and does not exist at runtime. Extensions
-# wrapping attrs decorators should declare a separate __dataclass_transform__
-# signature in the extension module using the specification linked above to
-# provide pyright support.
-def __dataclass_transform__(
- *,
- eq_default: bool = True,
- order_default: bool = False,
- kw_only_default: bool = False,
- field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
-) -> Callable[[_T], _T]: ...
-
-class Attribute(Generic[_T]):
- name: str
- default: Optional[_T]
- validator: Optional[_ValidatorType[_T]]
- repr: _ReprArgType
- cmp: _EqOrderType
- eq: _EqOrderType
- order: _EqOrderType
- hash: Optional[bool]
- init: bool
- converter: Optional[_ConverterType]
- metadata: Dict[Any, Any]
- type: Optional[Type[_T]]
- kw_only: bool
- on_setattr: _OnSetAttrType
-
- def evolve(self, **changes: Any) -> "Attribute[Any]": ...
-
-# NOTE: We had several choices for the annotation to use for type arg:
-# 1) Type[_T]
-# - Pros: Handles simple cases correctly
-# - Cons: Might produce less informative errors in the case of conflicting
-# TypeVars e.g. `attr.ib(default='bad', type=int)`
-# 2) Callable[..., _T]
-# - Pros: Better error messages than #1 for conflicting TypeVars
-# - Cons: Terrible error messages for validator checks.
-# e.g. attr.ib(type=int, validator=validate_str)
-# -> error: Cannot infer function type argument
-# 3) type (and do all of the work in the mypy plugin)
-# - Pros: Simple here, and we could customize the plugin with our own errors.
-# - Cons: Would need to write mypy plugin code to handle all the cases.
-# We chose option #1.
-
-# `attr` lies about its return type to make the following possible:
-# attr() -> Any
-# attr(8) -> int
-#     attr(validator=<some callable>)  -> Whatever the validator returns.
-
-    Because OpenPype tools do not deal well with folders for a single entity
-    like a Harmony scene, this integration uses zip files to encapsulate the
-    Harmony scene folders. Saving the scene in Harmony via the menu or CTRL+S
-    will not produce a zip file, only saving it from Workfiles will. This is
-    because the zipping process can take some time, during which we cannot
-    block the user from saving again. If the xstage file is changed during
-    the zipping process it will produce a corrupted zip archive.
-
-        invalid_setting_str = "Found invalid settings:\n{}".\
-            format(break_str.join(invalid_settings))
-
- formatting_data = {
- "invalid_setting_str": invalid_setting_str,
- "invalid_keys_str": invalid_keys_str
- }
- raise PublishXmlValidationError(self, msg,
- formatting_data=formatting_data)
-
- scene_url = instance.context.data.get("scenePath")
- if not os.path.exists(scene_url):
- msg = "Scene file {} not found (saved under wrong name)".format(
- scene_url
- )
- formatting_data = {
- "scene_url": scene_url
- }
- raise PublishXmlValidationError(self, msg, key="file_not_found",
- formatting_data=formatting_data)
-
-
-def _update_frames(expected_settings):
- """
- Calculate proper frame range including handles set in DB.
-
-    Harmony requires rendering from frame 1, so the frame range is always
-    shifted to start at 1.
-
-    Args:
- expected_settings (dict): pulled from DB
-
- Returns:
- modified expected_setting (dict)
- """
- frames_count = expected_settings["frameEnd"] - \
- expected_settings["frameStart"] + 1
-
- expected_settings["frameStart"] = 1.0 + expected_settings["handleStart"]
- expected_settings["frameEnd"] = \
- expected_settings["frameStart"] + frames_count - 1
- return expected_settings
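
A quick worked example of the shift performed by `_update_frames` above,
assuming frameStart=1001, frameEnd=1010 and a 5-frame start handle pulled from
the DB:

    expected = {"frameStart": 1001, "frameEnd": 1010,
                "handleStart": 5, "handleEnd": 5}

    frames_count = expected["frameEnd"] - expected["frameStart"] + 1  # 10 frames
    expected["frameStart"] = 1.0 + expected["handleStart"]            # 6.0
    expected["frameEnd"] = expected["frameStart"] + frames_count - 1  # 15.0
    # The 10-frame range 1001-1010 is re-based so rendering starts at 1,
    # with the start handle pushing the expected content range to 6-15.
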
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/.eslintrc.json b/server_addon/harmony/client/ayon_harmony/vendor/.eslintrc.json
deleted file mode 100644
index 3aafb2b905..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/.eslintrc.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "env": {
- "browser": true
- },
- "extends": "eslint:recommended",
- "ignorePatterns": ["**/*.js"]
-}
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitattributes b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitattributes
deleted file mode 100644
index a9083715c5..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitattributes
+++ /dev/null
@@ -1,36 +0,0 @@
-$.html merge=ours
-$.oAttribute.html merge=ours
-$.oBackdrop.html merge=ours
-$.oBox.html merge=ours
-$.oColor.html merge=ours
-$.oColorValue.html merge=ours
-$.oColumn.html merge=ours
-$.oDialog.html merge=ours
-$.oDialog.Progress.html merge=ours
-$.oDrawing.html merge=ours
-$.oDrawingColumn.html merge=ours
-$.oDrawingNode.html merge=ours
-$.oElement.html merge=ours
-$.oFile.html merge=ours
-$.oFolder.html merge=ours
-$.oFrame.html merge=ours
-$.oGroupNode.html merge=ours
-$.oList.html merge=ours
-$.oNetwork.html merge=ours
-$.oNode.html merge=ours
-$.oNodeLink.html merge=ours
-$.oPalette.html merge=ours
-$.oPathPoint.html merge=ours
-$.oPegNode.html merge=ours
-$.oPoint.html merge=ours
-$.oScene.html merge=ours
-$.oThread.html merge=ours
-$.oTimeline.html merge=ours
-$.oTimelineLayer.html merge=ours
-$.oUtils.html merge=ours
-$.index.html merge=ours
-$.global.html merge=ours
-$.oDatabase.html merge=ours
-$.oProgressDialog.html merge=ours
-$.oProcess.html merge=ours
-NodeTypes.html merge=ours
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitignore b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitignore
deleted file mode 100644
index 23ed1735fe..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-node_modules/*
-
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/Install.bat b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/Install.bat
deleted file mode 100644
index ee632a0fbf..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/Install.bat
+++ /dev/null
@@ -1,40 +0,0 @@
-@echo off
-SETLOCAL ENABLEDELAYEDEXPANSION
-SET dlPath=%~dp0
-set harmonyPrefsDir=%appdata%\Toon Boom Animation
-
-SETX LIB_OPENHARMONY_PATH %dlPath%
-
-echo -------------------------------------------------------------------
-echo -- Starting install of openHarmony open source scripting library --
-echo -------------------------------------------------------------------
-echo OpenHarmony will be installed to the folder :
-echo %dlpath%
-echo Do not delete the contents of this folder.
-
-REM Check Harmony Versions and make a list
-for /d %%D in ("%harmonyPrefsDir%\*Harmony*") do (
- set harmonyVersionDir=%%~fD
- for /d %%V in ("!harmonyVersionDir!\*-layouts*") do (
- set "folderName=%%~nD"
- set "versionName=%%~nV"
- set "harmonyFolder=!folderName:~-7!"
- set "harmonyVersions=!versionName:~0,2!"
- echo Found Toonboom Harmony !harmonyFolder! !harmonyVersions! - installing openHarmony for this version.
- set "installDir=!harmonyPrefsDir!\Toon Boom Harmony !harmonyFolder!\!harmonyVersions!00-scripts\"
-
- if not "!installDir!" == "!dlPath!" (
- REM creating a "openHarmony.js" file in script folders
- if not exist "!installDir!" mkdir "!installDir!"
-
- cd !installDir!
-
- set "script=include(System.getenv('LIB_OPENHARMONY_PATH')+'openHarmony.js');"
- echo !script!> openHarmony.js
- )
- echo ---- done. ----
- )
-)
-
-echo - Install Complete -
-pause
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/LICENSE b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/LICENSE
deleted file mode 100644
index a612ad9813..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/LICENSE
+++ /dev/null
@@ -1,373 +0,0 @@
-Mozilla Public License Version 2.0
-==================================
-
-1. Definitions
---------------
-
-1.1. "Contributor"
- means each individual or legal entity that creates, contributes to
- the creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
- means the combination of the Contributions of others (if any) used
- by a Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
- means Source Code Form to which the initial Contributor has attached
- the notice in Exhibit A, the Executable Form of such Source Code
- Form, and Modifications of such Source Code Form, in each case
- including portions thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- (a) that the initial Contributor has attached the notice described
- in Exhibit B to the Covered Software; or
-
- (b) that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the
- terms of a Secondary License.
-
-1.6. "Executable Form"
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
- means a work that combines Covered Software with other material, in
- a separate file or files, that is not Covered Software.
-
-1.8. "License"
- means this document.
-
-1.9. "Licensable"
- means having the right to grant, to the maximum extent possible,
- whether at the time of the initial grant or subsequently, any and
- all of the rights conveyed by this License.
-
-1.10. "Modifications"
- means any of the following:
-
- (a) any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered
- Software; or
-
- (b) any new file in Source Code Form that contains any Covered
- Software.
-
-1.11. "Patent Claims" of a Contributor
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the
- License, by the making, using, selling, offering for sale, having
- made, import, or transfer of either its Contributions or its
- Contributor Version.
-
-1.12. "Secondary License"
- means either the GNU General Public License, Version 2.0, the GNU
- Lesser General Public License, Version 2.1, the GNU Affero General
- Public License, Version 3.0, or any later versions of those
- licenses.
-
-1.13. "Source Code Form"
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that
- controls, is controlled by, or is under common control with You. For
- purposes of this definition, "control" means (a) the power, direct
- or indirect, to cause the direction or management of such entity,
- whether by contract or otherwise, or (b) ownership of more than
- fifty percent (50%) of the outstanding shares or beneficial
- ownership of such entity.
-
-2. License Grants and Conditions
---------------------------------
-
-2.1. Grants
-
-Each Contributor hereby grants You a world-wide, royalty-free,
-non-exclusive license:
-
-(a) under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
-(b) under Patent Claims of such Contributor to make, use, sell, offer
- for sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
-The licenses granted in Section 2.1 with respect to any Contribution
-become effective for each Contribution on the date the Contributor first
-distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
-The licenses granted in this Section 2 are the only rights granted under
-this License. No additional rights or licenses will be implied from the
-distribution or licensing of Covered Software under this License.
-Notwithstanding Section 2.1(b) above, no patent license is granted by a
-Contributor:
-
-(a) for any code that a Contributor has removed from Covered Software;
- or
-
-(b) for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
-(c) under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
-This License does not grant any rights in the trademarks, service marks,
-or logos of any Contributor (except as may be necessary to comply with
-the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-No Contributor makes additional grants as a result of Your choice to
-distribute the Covered Software under a subsequent version of this
-License (see Section 10.2) or under the terms of a Secondary License (if
-permitted under the terms of Section 3.3).
-
-2.5. Representation
-
-Each Contributor represents that the Contributor believes its
-Contributions are its original creation(s) or it has sufficient rights
-to grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-This License is not intended to limit any rights You have under
-applicable copyright doctrines of fair use, fair dealing, or other
-equivalents.
-
-2.7. Conditions
-
-Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
-in Section 2.1.
-
-3. Responsibilities
--------------------
-
-3.1. Distribution of Source Form
-
-All distribution of Covered Software in Source Code Form, including any
-Modifications that You create or to which You contribute, must be under
-the terms of this License. You must inform recipients that the Source
-Code Form of the Covered Software is governed by the terms of this
-License, and how they can obtain a copy of this License. You may not
-attempt to alter or restrict the recipients' rights in the Source Code
-Form.
-
-3.2. Distribution of Executable Form
-
-If You distribute Covered Software in Executable Form then:
-
-(a) such Covered Software must also be made available in Source Code
- Form, as described in Section 3.1, and You must inform recipients of
- the Executable Form how they can obtain a copy of such Source Code
- Form by reasonable means in a timely manner, at a charge no more
- than the cost of distribution to the recipient; and
-
-(b) You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter
- the recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-You may create and distribute a Larger Work under terms of Your choice,
-provided that You also comply with the requirements of this License for
-the Covered Software. If the Larger Work is a combination of Covered
-Software with a work governed by one or more Secondary Licenses, and the
-Covered Software is not Incompatible With Secondary Licenses, this
-License permits You to additionally distribute such Covered Software
-under the terms of such Secondary License(s), so that the recipient of
-the Larger Work may, at their option, further distribute the Covered
-Software under the terms of either this License or such Secondary
-License(s).
-
-3.4. Notices
-
-You may not remove or alter the substance of any license notices
-(including copyright notices, patent notices, disclaimers of warranty,
-or limitations of liability) contained within the Source Code Form of
-the Covered Software, except that You may alter any license notices to
-the extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-You may choose to offer, and to charge a fee for, warranty, support,
-indemnity or liability obligations to one or more recipients of Covered
-Software. However, You may do so only on Your own behalf, and not on
-behalf of any Contributor. You must make it absolutely clear that any
-such warranty, support, indemnity, or liability obligation is offered by
-You alone, and You hereby agree to indemnify every Contributor for any
-liability incurred by such Contributor as a result of warranty, support,
-indemnity or liability terms You offer. You may include additional
-disclaimers of warranty and limitations of liability specific to any
-jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
----------------------------------------------------
-
-If it is impossible for You to comply with any of the terms of this
-License with respect to some or all of the Covered Software due to
-statute, judicial order, or regulation then You must: (a) comply with
-the terms of this License to the maximum extent possible; and (b)
-describe the limitations and the code they affect. Such description must
-be placed in a text file included with all distributions of the Covered
-Software under this License. Except to the extent prohibited by statute
-or regulation, such description must be sufficiently detailed for a
-recipient of ordinary skill to be able to understand it.
-
-5. Termination
---------------
-
-5.1. The rights granted under this License will terminate automatically
-if You fail to comply with any of its terms. However, if You become
-compliant, then the rights granted under this License from a particular
-Contributor are reinstated (a) provisionally, unless and until such
-Contributor explicitly and finally terminates Your grants, and (b) on an
-ongoing basis, if such Contributor fails to notify You of the
-non-compliance by some reasonable means prior to 60 days after You have
-come back into compliance. Moreover, Your grants from a particular
-Contributor are reinstated on an ongoing basis if such Contributor
-notifies You of the non-compliance by some reasonable means, this is the
-first time You have received notice of non-compliance with this License
-from such Contributor, and You become compliant prior to 30 days after
-Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-infringement claim (excluding declaratory judgment actions,
-counter-claims, and cross-claims) alleging that a Contributor Version
-directly or indirectly infringes any patent, then the rights granted to
-You by any and all Contributors for the Covered Software under Section
-2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all
-end user license agreements (excluding distributors and resellers) which
-have been validly granted by You or Your distributors under this License
-prior to termination shall survive termination.
-
-************************************************************************
-* *
-* 6. Disclaimer of Warranty *
-* ------------------------- *
-* *
-* Covered Software is provided under this License on an "as is" *
-* basis, without warranty of any kind, either expressed, implied, or *
-* statutory, including, without limitation, warranties that the *
-* Covered Software is free of defects, merchantable, fit for a *
-* particular purpose or non-infringing. The entire risk as to the *
-* quality and performance of the Covered Software is with You. *
-* Should any Covered Software prove defective in any respect, You *
-* (not any Contributor) assume the cost of any necessary servicing, *
-* repair, or correction. This disclaimer of warranty constitutes an *
-* essential part of this License. No use of any Covered Software is *
-* authorized under this License except under this disclaimer. *
-* *
-************************************************************************
-
-************************************************************************
-* *
-* 7. Limitation of Liability *
-* -------------------------- *
-* *
-* Under no circumstances and under no legal theory, whether tort *
-* (including negligence), contract, or otherwise, shall any *
-* Contributor, or anyone who distributes Covered Software as *
-* permitted above, be liable to You for any direct, indirect, *
-* special, incidental, or consequential damages of any character *
-* including, without limitation, damages for lost profits, loss of *
-* goodwill, work stoppage, computer failure or malfunction, or any *
-* and all other commercial damages or losses, even if such party *
-* shall have been informed of the possibility of such damages. This *
-* limitation of liability shall not apply to liability for death or *
-* personal injury resulting from such party's negligence to the *
-* extent applicable law prohibits such limitation. Some *
-* jurisdictions do not allow the exclusion or limitation of *
-* incidental or consequential damages, so this exclusion and *
-* limitation may not apply to You. *
-* *
-************************************************************************
-
-8. Litigation
--------------
-
-Any litigation relating to this License may be brought only in the
-courts of a jurisdiction where the defendant maintains its principal
-place of business and such litigation shall be governed by laws of that
-jurisdiction, without reference to its conflict-of-law provisions.
-Nothing in this Section shall prevent a party's ability to bring
-cross-claims or counter-claims.
-
-9. Miscellaneous
-----------------
-
-This License represents the complete agreement concerning the subject
-matter hereof. If any provision of this License is held to be
-unenforceable, such provision shall be reformed only to the extent
-necessary to make it enforceable. Any law or regulation which provides
-that the language of a contract shall be construed against the drafter
-shall not be used to construe this License against a Contributor.
-
-10. Versions of the License
----------------------------
-
-10.1. New Versions
-
-Mozilla Foundation is the license steward. Except as provided in Section
-10.3, no one other than the license steward has the right to modify or
-publish new versions of this License. Each version will be given a
-distinguishing version number.
-
-10.2. Effect of New Versions
-
-You may distribute the Covered Software under the terms of the version
-of the License under which You originally received the Covered Software,
-or under the terms of any subsequent version published by the license
-steward.
-
-10.3. Modified Versions
-
-If you create software not governed by this License, and you want to
-create a new license for such software, you may create and use a
-modified version of this License if you rename the license and remove
-any references to the name of the license steward (except to note that
-such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
-Licenses
-
-If You choose to distribute Source Code Form that is Incompatible With
-Secondary Licenses under the terms of this version of the License, the
-notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
--------------------------------------------
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular
-file, then You may include the notice in a location (such as a LICENSE
-file in a relevant directory) where a recipient would be likely to look
-for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
----------------------------------------------------------
-
- This Source Code Form is "Incompatible With Secondary Licenses", as
- defined by the Mozilla Public License, v. 2.0.
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/README.md b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/README.md
deleted file mode 100644
index 064afca86c..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/README.md
+++ /dev/null
@@ -1,202 +0,0 @@
-# OpenHarmony - The Toonboom Harmony Open Source DOM Library
-
-## Why did we make this library ?
-
-Ever tried to make a simple script for Toonboom Harmony, then got stumped by the sheer number of steps required to execute the simplest action? Or got bored of coding the same helper routines again and again for every studio you work for?
-
-Toonboom Harmony is a very powerful software, with hundreds of functions and tools, and it unlocks a great amount of possibilities for animation studios around the globe. And... being the product of the hard work of a small team forced to prioritise, it can also be a bit rustic at times!
-
-We are users at heart, animators and riggers, who just want to interact with the software as simply as possible. Simplicity is at the heart of the design of openHarmony. But we are also developers, and we made the library for people like us who can't resist tweaking the software, bending it in all possible ways, and who are looking for powerful functions to help them do it.
-
-This library's aim is to create a more direct way to interact with Toonboom through scripts: providing a more intuitive way to access its elements, helping with cumbersome and repetitive tasks, and unlocking untapped potential in its many available systems. So we can go from having to do things like this:
-
-```javascript
- // adding a Drawing to the scene with the official API
- var myNodeName = "Drawing";
- var myColumnName = myNodeName;
- var myNode = node.add("Top", myNodeName, "READ",0,0,0);
- var myColumn = column.add(myColumnName, "DRAWING", "BOTTOM");
- var myElement = element.add (myNodeName, "COLOR", 12, "SCAN", "TVG");
- column.setElementIdOfDrawing(myColumnName, myElement);
- node.linkAttr (myNode, "DRAWING.ELEMENT", myColumnName);
- drawing.create (myElement, "1", false, false);
- column.setEntry (myColumnName, 0, 1, "1");
-```
-
-to simply writing:
-
-```javascript
- // with openHarmony
- var myNode = $.scene.root.addDrawingNode("Drawing");
- myNode.element.addDrawing(1);
-```
-
-Less time spent coding, more time spent having ideas!
-
------
-## Do I need any knowledge of toonboom scripting to use openHarmony?
-
-OpenHarmony aims to be self-contained and to reimplement all the basic functions of the Harmony API. So, while it might help to have prior experience to understand what goes on under the hood, knowledge of the official API is not required.
-
-However, should you reach the limits of what openHarmony can offer at this time, you can always access the official API at any moment. Maybe you can submit a request and the missing parts will be added eventually, or you can even delve into the code and add the necessary functions yourself if you feel like it!
-
-You can access a list of all the functions, how to use them, as well as examples, from the online documentation:
-
-[https://cfourney.github.io/OpenHarmony/$.html](https://cfourney.github.io/OpenHarmony/$.html)
-
-To help you get started, here is a full example using the library to make and animate a small car, covering most of the basic features.
-
-[https://github.com/cfourney/OpenHarmony/blob/master/examples/openHarmonyExample.js](https://github.com/cfourney/OpenHarmony/blob/master/examples/openHarmonyExample.js)
-
------
-## The OpenHarmony Document Object Model or DOM
-
-OpenHarmony is based around the four principles of Object Oriented Programming: *Abstraction*, *Encapsulation*, *Inheritance*, *Polymorphism*.
-
-This means every element of the Harmony scene has a corresponding abstraction existing in the code as a class. We have oNode, oScene, oColumn, etc. Unlike in the official API, each class is designed to create objects that are instances of these classes, encapsulating them and all their actions. It means no more storing the path of nodes, abstract column names and element ids to interact with them; if you can create or call it, you can access all of its functionalities. Nodes are specialised into DrawingNodes and PegNodes, which inherit from the Node class, and so on.
-
-The openHarmony library doesn't merely provide *access* to the elements of a Toonboom Harmony file, it *models* them and their relationship to each other.
-
-
-
-The *Document Object Model* is a way to organise the elements of the Toonboom scene by highlighting the way they interact with each other. The Scene object has a root group, which contains Nodes, which have Attributes, which can be linked to Columns, which contain Frames, etc. This way it's always easy to find and access the content you are looking for. The attribute system has also been streamlined, and you can now set values of node properties with a simple assignment syntax.
-
-We implemented global access to all elements and functions through the standard **dot notation** for the hierarchy, for ease of use and clarity of code.
-
-Functions and methods also make extensive use of **optional parameters**, so there is no need to fill in every argument when the default behavior is all that's needed.
-
-On the other hand, the "o" naming scheme allows us to retain full access to the official API at all times. This means you can use it only when it really makes your life better.
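-
-To make this concrete, here is a small illustrative sketch of that dot-notation traversal and assignment (the node path is made up, and the calls mirror the examples given in the attribute documentation):
-
-```javascript
- // grab a node by its path and animate its x position through the DOM
- var myNode = $.scn.$node("Top/MyPeg"); // hypothetical peg node
- myNode.attributes.position.x.setValue(5, 10); // sets the value to 5 at frame 10
-
- // when the attribute isn't animated, a simple assignment works too:
- myNode.position.x = 5;
-```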
-
------
-## Adopting openHarmony for your project
-
-This library is made available under the [Mozilla Public license 2.0](https://www.mozilla.org/en-US/MPL/2.0/).
-
-OpenHarmony can be downloaded from [this repository](https://github.com/cfourney/OpenHarmony/releases/) directly. In order to make use of its functions, it needs to be unzipped next to the scripts you will be writing.
-
-All you have to do is call:
-```javascript
-include("openHarmony.js");
-```
-at the beginning of your script.
-
-You can ask your users to download their own copy of the library and store it alongside your scripts, or bundle it as you wish, as long as you include the license file provided in this repository.
-
-The entire library is documented at the address:
-
-https://cfourney.github.io/OpenHarmony/$.html
-
-This includes a list of all the available functions as well as examples and references (such as the list of all available node attributes).
-
-As time goes by, more functions will be added and the documentation will also get richer as more examples get created.
-
------
-## Installation
-
-#### simple install:
-- download the zip from [the releases page](https://github.com/cfourney/OpenHarmony/releases/),
-- unzip the contents to [your scripts folder](https://docs.toonboom.com/help/harmony-17/advanced/scripting/import-script.html).
-
-#### advanced install (for developers):
-- clone the repository to the location of your choice
-
- -- or --
-
-- download the zip from [the releases page](https://github.com/cfourney/OpenHarmony/releases/)
-- unzip the contents where you want to store the library,
-
- -- then --
-
-- run `install.bat`.
-
-This last step will tell Harmony where to look to load the library, by setting the environment variable `LIB_OPENHARMONY_PATH` to the current folder.
-
-It will then create an `openHarmony.js` file in the user scripts folder which loads the library files from the folder set in the `LIB_OPENHARMONY_PATH` variable, so that scripts can make direct use of it without having to worry about where openHarmony is stored.
-
-##### Troubleshooting:
-- to test if the library is correctly installed, open the `Script Editor` window and type:
-```javascript
-include ("openHarmony.js");
-$.alert("hello world");
-```
-Run the script, and if there is an error (for example `MAX_REENTRENCY `), check that the file `openHarmony.js` exists in the script folder, and contains only the line:
-```javascript
-include(System.getenv('LIB_OPENHARMONY_PATH')+'openHarmony.js');
-```
-Check that the environment variable `LIB_OPENHARMONY_PATH` is set correctly and points to the folder where the library is stored.
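-
-A quick way to verify the variable from the `Script Editor` is a one-liner (a minimal sketch, using the same call as the generated file):
-```javascript
-MessageLog.trace(System.getenv("LIB_OPENHARMONY_PATH"));
-```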
-
------
-## How to add openHarmony to vscode intellisense for autocompletion
-
-Although not fully supported, you can get most of the autocompletion features to work by adding the following lines to a `jsconfig.json` file placed at the root of your working folder.
-The paths need to be relative, which means the openHarmony source code must be placed directly in your development environment.
-
-For example, if your working folder contains the openHarmony source in a folder called `OpenHarmony` and your working scripts in a folder called `myScripts`, place the `jsconfig.json` file at the root of the folder and add these lines to the file:
-
-```javascript
-{
-  "include": [
- "OpenHarmony/*",
- "OpenHarmony/openHarmony/*",
- "myScripts/*",
- "*"
- ]
-}
-```
-
-[More information on vs code and jsconfig.json.](https://code.visualstudio.com/docs/nodejs/working-with-javascript)
-
------
-## Let's get technical. I can code, and want to contribute, where do I start?
-
-Reading and understanding the existing code, or at least the structure of the lib, is a great start, but not a requirement. You can simply start adding your classes to the $ object that is the root of the harmony lib, and start implementing. However, try to follow these guidelines as they are the underlying principles that make the library consistent:
-
- * There is a $ global object, which contains all the class declarations, and can be passed from one context to another to access the functions.
-
- * Each class is an abstract representation of a core concept of Harmony, so naming and consistency (within the lib) is essential. But we are not bound by the structure or naming of Harmony if we find a better way, for example to make nomenclatures more consistent between the scripting interface and the UI.
-
- * Each class defines a bunch of class properties with getters/setters for the values that are directly related to an entity of the scene. If you're thinking of making a getter function that doesn't require arguments, use a getter/setter instead! (A short sketch of this pattern follows after this list.)
-
- * Each class also defines methods which can be called on the class instances to affect its contents, or its children's contents. For example, you'd go to the scene class to add the things that live in the scene, such as elements, columns and palettes. You wouldn't go to the column class or palette class to add one, because then what are you adding it *to*?
-
- * We favor encapsulation over passing arguments to functions whenever we can. Instead of adding a node to the scene and having to pass a group as an argument, adding a node is done directly by calling a method of the parent group. This way the parent/child relationship is always clear and the argument list is kept to a minimum.
-
- * The goal is to make the most useful set of functions we can. Instead of making a large function that does a lot, consider extracting the small useful subroutines you need in your function into the existing classes directly.
-
- * Each method argument besides the core one (for example, for adding nodes, we have to specify the type of the new node we create) must have a default fallback to make the argument optional.
-
- * Don't use globals ever, but maybe use a class property if you need an enum for example.
-
- * Don't use the official API namespace: any function that exists in the official API must remain accessible, otherwise things will break. Prefix your class names with "o" to avoid this and to signify that the function is part of openHarmony.
-
- * We use the official API as little as we can in the code, so that if the implementation changes, we can easily fix it in a minimal number of places. Wrap it, then use the wrapper. (ex: oScene.name)
-
- * Users of the lib should almost never have to use "new" to create instances of its classes. Create accessors/factories that will do that for them. For example, $.scn creates and returns an oScene instance, and $.scn.nodes returns new oNode instances, but users don't have to create them themselves, so it's as if they were always there, contained within. It also lets you create different subclasses from one factory. For example, $.scn.$node("Top/myNode") will return an oNode, oDrawingNode, oPegNode or oGroupNode object depending on the node type of the node represented by the object.
-   Exceptions are small, useful value-containing objects that don't belong to the Harmony hierarchy, like oPoint, oBox, oColorValue, etc.
-
- * It's a JS library, so use camelCase naming and try to follow the Google style guide for JS:
- https://google.github.io/styleguide/jsguide.html
-
- * Document your new functions using the JSDoc syntax: https://devdocs.io/jsdoc/howto-es2015-classes
-
- * Make a branch, create a merge request when you're done, and we'll add the new stuff you added to the lib :)
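-
-As a purely illustrative sketch (the class below is made up and not part of the library), a new addition following these guidelines could look like this:
-
-```javascript
- // hypothetical example: an "o"-prefixed class added to the $ namespace, no globals
- $.oExample = function( path ){
-   this.path = path;
- }
-
- // a class property exposed through a getter/setter instead of a getName() function
- Object.defineProperty($.oExample.prototype, 'name', {
-   get : function(){
-     return this.path.split("/").pop();
-   }
- });
-```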
-
-
------
-## Credits
-
-This library was created by Mathieu Chaptel and Chris Fourney.
-
-If you're using openHarmony, and are noticing things that you would like to see in the library, please feel free to contribute to the code directly, or send us feedback through Github. This project will only be as good as people working together can make it, and we need every piece of code and feedback we can get, and would love to hear from you!
-
------
-## Community
-
-Join the discord community for help with the library and to contribute:
-https://discord.gg/kgT38MG
-
------
-## Acknowledgements
- * [Yu Ueda](https://github.com/yueda1984) for his help to understand Harmony coordinate systems
- * [Dash](https://github.com/35743) for his help to debug, test and develop the Pie Menus widgets
- * [All the contributors](https://github.com/cfourney/OpenHarmony/graphs/contributors) for their precious help.
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/build_doc.bat b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/build_doc.bat
deleted file mode 100644
index 57a6161e95..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/build_doc.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-jsdoc -c ./documentation.json -t ../node_modules/jaguarjs-jsdoc
-pause
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/documentation.json b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/documentation.json
deleted file mode 100644
index 1b3c2b9ee7..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/documentation.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "plugins": [],
- "recurseDepth": 10,
- "source": {
- "include": ["."],
- "includePattern": ".+\\.js(doc|x)?$",
- "exclude": [ "./openHarmony_tools.js" ]
- },
- "sourceType": "module",
- "tags": {
- "allowUnknownTags": true,
- "dictionaries": ["jsdoc","closure"]
- },
- "templates": {
- "cleverLinks": false,
- "monospaceLinks": false
- },
- "opts": {
- "encoding": "utf8",
- "destination": "./docs/",
- "recurse": true
- }
-}
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/install.sh b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/install.sh
deleted file mode 100644
index 7d311d84f3..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/install.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-set dlPath=pwd
-set harmonyPrefsDir=~/Library/Preferences/Toon Boom Animation/
-
-echo -------------------------------------------------------------------
-echo -- Starting install of openHarmony open source scripting library --
-echo -------------------------------------------------------------------
-echo OpenHarmony will be installed to the folder :
-echo $dlpath
-echo Do not delete the contents of this folder.
-
-REM Check Harmony Versions and make a list
-for /d %%D in ("%harmonyPrefsDir%\*Harmony*") do (
- set harmonyVersionDir=%%~fD
- for /d %%V in ("!harmonyVersionDir!\*-layouts*") do (
- set "folderName=%%~nD"
- set "versionName=%%~nV"
- set "harmonyFolder=!folderName:~-7!"
- set "harmonyVersions=!versionName:~0,2!"
- echo Found Toonboom Harmony !harmonyFolder! !harmonyVersions! - installing openHarmony for this version.
- set "installDir=!harmonyPrefsDir!\Toon Boom Harmony !harmonyFolder!\!harmonyVersions!00-scripts\"
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/oH_DOM.jpg b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/oH_DOM.jpg
deleted file mode 100644
index 3892cba69b..0000000000
Binary files a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/oH_DOM.jpg and /dev/null differ
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony.js
deleted file mode 100644
index ae65d32a2b..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony.js
+++ /dev/null
@@ -1,497 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameter values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do, make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API, which must remain available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $ (DOM) class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * All the classes can be accessed from it, and it can be passed to a different context.
- * @namespace
- * @classdesc The $ global object that holds all the functions of openHarmony.
- * @property {int} debug_level The debug level of the DOM.
- * @property {bool} batchMode Deactivate all ui and incompatible functions to ensure scripts run in batch.
- * @property {string} file The openHarmony base file - THIS!
- *
- * @property {$.oScene} getScene The harmony scene.
- * @property {$.oScene} scene The harmony scene.
- * @property {$.oScene} scn The harmony scene.
- * @property {$.oScene} s The harmony scene.
- * @property {$.oApp} getApplication The Harmony Application Object.
- * @property {$.oApp} application The Harmony Application Object.
- * @property {$.oApp} app The Harmony Application Object.
- * @property {$.oNetwork} network Access point for all the functions of the $.oNetwork class
- * @property {$.oUtils} utils Access point for all the functions of the $.oUtils class
- * @property {$.oDialog} dialog Access point for all the functions of the $.oDialog class
- * @property {Object} global The global scope.
- *
- * @example
- * // To access the functions, first call the $ object. It is made available after loading openHarmony like so:
- *
- * include ("openHarmony.js");
- *
- * var doc = $.scn; // grabbing the scene document
- * $.log("hello"); // prints out a message to the MessageLog.
- * var myPoint = new $.oPoint(0,0,0); // create a new class instance from an openHarmony class.
- *
- * // function members of the $ objects get published to the global scope, which means $ can be omitted
- *
- * log("hello");
- * var myPoint = new oPoint(0,0,0); // This is all valid
- * var doc = scn; // "scn" isn't a function so this one isn't
- *
- */
-$ = {
- debug_level : 0,
-
- /**
- * Enum to set the debug level of debug statements.
- * @name $#DEBUG_LEVEL
- * @enum
- */
- DEBUG_LEVEL : {
- 'ERROR' : 0,
- 'WARNING' : 1,
- 'LOG' : 2
- },
- file : __file__,
- directory : false,
- pi : 3.14159265359
-};
-
-
-/**
- * The openHarmony main Install directory
- * @name $#directory
- * @type {string}
- */
-Object.defineProperty( $, "directory", {
- get : function(){
- var currentFile = __file__
- return currentFile.split("\\").join("/").split( "/" ).slice(0, -1).join('/');
- }
-});
-
-
-/**
- * Whether Harmony is run with the interface or simply from command line
- */
-Object.defineProperty( $, "batchMode", {
- get: function(){
- // use a cache to avoid pulling the widgets every time
- if (!this.hasOwnProperty("_batchMode")){
- this._batchMode = true;
-
- // batchmode is false if there are any widgets visible in the application
- var _widgets = QApplication.topLevelWidgets();
- for (var i in _widgets){
- if (_widgets[i].visible) this._batchMode = false;
- }
- }
- return this._batchMode
- }
-})
-
-/**
- * Function to load openHarmony files from the %installdir%/openHarmony/ folder.
- * @name $#loadOpenHarmonyFiles
- * @private
- */
-var _ohDirectory = $.directory+"/openHarmony/";
-var _dir = new QDir(_ohDirectory);
-_dir.setNameFilters(["openHarmony*.js"]);
-_dir.setFilter( QDir.Files);
-var _files = _dir.entryList();
-
-for (var i in _files){
- include( _ohDirectory + "/" + _files[i]);
-}
-
-
-
-
-/**
- * The standard debug that uses logic and level to write to the messagelog. Everything should just call this to write internally to a log in OpenHarmony.
- * @function
- * @name $#debug
- * @param {obj} obj The object or message to log.
- * @param {int} level The debug level of the incoming message to log.
- */
-$.debug = function( obj, level ){
- if( level > this.debug_level ) return;
-
- try{
- if (typeof obj !== 'object') throw new Error();
- this.log(JSON.stringify(obj));
- }catch(err){
- this.log(obj);
- }
-}
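-
-// Illustrative call (kept as a comment, not part of the library):
-// log an object at LOG verbosity using the DEBUG_LEVEL enum defined above.
-//   $.debug({foo: "bar"}, $.DEBUG_LEVEL.LOG);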
-
-
-/**
- * Log the string to the MessageLog.
- * @function
- * @name $#log
- * @param {string} str Text to log.
- */
-$.log = function( str ){
- MessageLog.trace( str );
- System.println( str );
-}
-
-
-/**
- * Log the object and its contents.
- * @function
- * @name $#logObj
- * @param {object} object The object to log.
- * @param {int} debugLevel The debug level.
- */
-$.logObj = function( object ){
- for (var i in object){
- try {
- if (typeof object[i] === "function") continue;
- $.log(i+' : '+object[i])
- if (typeof object[i] == "Object"){
- $.log(' -> ')
- $.logObj(object[i])
- $.log(' ----- ')
- }
- }catch(error){}
- }
-}
-
-
-//---- App --------------
-$.app = new $.oApp();
-$.application = $.app;
-$.getApplication = $.app;
-
-
-//---- Scene --------------
-$.s = new $.oScene();
-$.scn = $.s;
-$.scene = $.s;
-$.getScene = $.s;
-
-
-/**
- * Prompts with a confirmation dialog (yes/no choice).
- * @function
- * @name $#confirm
- * @param {string} [labelText] The label/internal text of the dialog.
- * @param {string} [title] The title of the confirmation dialog.
- * @param {string} [okButtonText] The text on the OK button of the dialog.
- * @param {string} [cancelButtonText] The text on the CANCEL button of the dialog.
- *
- * @return {bool} Result of the confirmation dialog.
- */
-$.confirm = function(){ return $.dialog.confirm.apply( $.dialog, arguments ) };
-
-
-/**
- * Prompts with an alert dialog (informational).
- * @function
- * @name $#alert
- * @param {string} [labelText] The label/internal text of the dialog.
- * @param {string} [title] The title of the confirmation dialog.
- * @param {string} [okButtonText] The text on the OK button of the dialog.
- */
-$.alert = function(){ return $.dialog.alert.apply( $.dialog, arguments ) };
-
-
-
-/**
- * Prompts with an alert dialog with a text box which can be selected (informational).
- * @function
- * @name $#alertBox
- * @param {string} [labelText] The label/internal text of the dialog.
- * @param {string} [title] The title of the confirmation dialog.
- * @param {string} [okButtonText] The text on the OK button of the dialog.
- */
-$.alertBox = function(){ return $.dialog.alertBox.apply( $.dialog, arguments ) };
-
-
-
-/**
- * Prompts with a toast alert. This is a small message that can't be clicked and only stays on the screen for the duration specified.
- * @function
- * @name $#toast
- * @param {string} labelText The label/internal text of the dialog.
- * @param {$.oPoint} [position] The position on the screen where the toast will appear (by default, slightly under the middle of the screen).
- * @param {float} [duration=2000] The duration of the display (in milliseconds).
- * @param {$.oColorValue} [color="#000000"] The color of the background (a 50% alpha value will be applied).
- */
-$.toast = function(){ return $.dialog.toast.apply( $.dialog, arguments ) };
-
-
-
-/**
- * Prompts for a user input.
- * @function
- * @name $#prompt
- * @param {string} [labelText] The label/internal text of the dialog.
- * @param {string} [title] The title of the confirmation dialog.
- * @param {string} [prefilledText] The text to display in the input area.
- */
-$.prompt = function(){ return $.dialog.prompt.apply( $.dialog, arguments ) };
-
-
-/**
- * Prompts with a file selector window
- * @function
- * @name $#browseForFile
- * @param {string} [text="Select a file:"] The title of the file select dialog.
- * @param {string} [filter="*"] The filter for the file type and/or file name that can be selected. Accepts wildcard character "*".
- * @param {bool} [getExisting=true] Whether to select an existing file or a save location
- * @param {bool} [acceptMultiple=false] Whether or not selecting more than one file is ok. Is ignored if getExisting is false.
- * @param {string} [startDirectory] The directory showed at the opening of the dialog.
- *
- * @return {string[]} The list of selected Files, 'undefined' if the dialog is cancelled
- */
-$.browseForFile = function(){ return $.dialog.browseForFile.apply( $.dialog, arguments ) };
-
-
-/**
- * Prompts with a folder selector window.
- * @function
- * @name $#browseForFolder
- * @param {string} [text] The title of the confirmation dialog.
- * @param {string} [startDirectory] The directory showed at the opening of the dialog.
- *
- * @return {string} The path of the selected folder, 'undefined' if the dialog is cancelled
- */
-$.browseForFolder = function(){ return $.dialog.browseForFolder.apply( $.dialog, arguments ) };
-
-
-/**
- * Gets access to a widget from the Harmony Interface.
- * @function
- * @name $#getHarmonyUIWidget
- * @param {string} name The name of the widget to look for.
- * @param {string} [parentName] The name of the parent widget to look into, in case of duplicates.
- */
-$.getHarmonyUIWidget = function(){ return $.app.getWidgetByName.apply( $.app, arguments ) }
-
-
-//---- Cache Helpers ------
-$.cache_columnToNodeAttribute = {};
-$.cache_columnToNodeAttribute_date = (new Date()).getTime();
-$.cache_oNode = {};
-
-
-//------------------------------------------------
-//-- Undo operations
-
-/**
- * Starts the tracking of the undo accumulation, all subsequent actions are done in a single undo operation. Close the undo accum with $.endUndo().
- * If this function is called multiple times, only the first time will count.
- * (this prevents small functions wrapped in their own undo block from interfering with the global script undo)
- * @param {string} undoName The name of the operation that is being done in the undo accum.
- * @name $#beginUndo
- * @function
- * @see $.endUndo
- */
-$.beginUndo = function( undoName ){
- if ($.batchMode) return
- if (typeof undoName === 'undefined') var undoName = ''+((new Date()).getTime());
- if (!$.hasOwnProperty("undoStackSize")) $.undoStackSize = 0;
- if ($.undoStackSize == 0) scene.beginUndoRedoAccum( undoName );
- $.undoStackSize++;
-}
-
-/**
- * Cancels the tracking of the undo accumulation, everything between this and the start of the accumulation is undone.
- * @name $#cancelUndo
- * @function
- */
-$.cancelUndo = function( ){
- scene.cancelUndoRedoAccum( );
-}
-
-/**
- * Stops the tracking of the undo accumulation, everything between this and the start of the accumulation behaves as a single undo operation.
- * If the beginUndo function is called multiple times, each call must be matched with this function.
- * (this prevents small functions wrapped in their own undo block from interfering with the global script undo)
- * @name $#endUndo
- * @function
- * @see $.beginUndo
- */
-$.endUndo = function( ){
- if ($.batchMode) return
-
- if (!$.hasOwnProperty("undoStackSize")) $.undoStackSize = 1;
- $.undoStackSize--;
- if ($.undoStackSize == 0) scene.endUndoRedoAccum();
-}
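-
-// Illustrative usage sketch (kept as a comment, not part of the library):
-// wrap several edits into a single undoable step. The node path below is hypothetical.
-//
-//   $.beginUndo("repositionPeg");
-//   var myNode = $.scn.$node("Top/MyPeg");
-//   myNode.attributes.position.x.setValue(5, 1);
-//   myNode.attributes.position.y.setValue(10, 1);
-//   $.endUndo();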
-
-/**
- * Undoes the last n operations. If n is not specified, it will be 1
- * @name $#undo
- * @function
- * @param {int} dist The number of operations to undo.
- */
-$.undo = function( dist ){
- if (typeof dist === 'undefined'){ var dist = 1; }
- scene.undo( dist );
-}
-
-/**
- * Redoes the last n operations. If n is not specified, it will be 1
- * @name $#redo
- * @function
- * @param {int} dist The number of operations to redo.
- */
-$.redo = function( dist ){
- if (typeof dist === 'undefined'){ var dist = 1; }
- scene.redo( dist );
-}
-
-
-/**
- * Gets the preferences from the Harmony stage.
- * @name $#getPreferences
- * @function
- */
-$.getPreferences = function( ){
- return new $.oPreferences();
-}
-
-//---- Attach Helpers ------
-$.network = new $.oNetwork();
-$.utils = $.oUtils;
-$.dialog = new $.oDialog();
-$.global = this;
-
-
-//---- Self caching -----
-
-/**
- * change this value to allow self caching across openHarmony when initialising objects.
- * @name $#useCache
- * @type {bool}
- */
-$.useCache = false;
-
-
-/**
- * function to call in constructors of classes so that instances of this class
- * are cached and unique based on constructor arguments.
- * @returns a cached class instance or null if no cached instance exists.
- */
-$.getInstanceFromCache = function(){
- if (!this.__proto__.hasOwnProperty("__cache__")) {
- this.__proto__.__cache__ = {};
- }
- var _cache = this.__proto__.__cache__;
-
- if (!this.$.useCache) return;
-
- var key = [];
- for (var i=0; i
- * It is used internally to get and set values and link a oColumn to a parameter in order to animate it. (Users should never have to instantiate this class)
- * For a list of attributes existing in each node type and their type, as well as examples of the values they can hold, refer to :
- * {@link NodeType}.
- * @constructor
- * @param {$.oNode} oNodeObject The oNodeObject that the attribute is associated to.
- * @param {attr} attributeObject The internal harmony Attribute Object.
- * @param {$.oAttribute} parentAttribute The parent attribute of the subattribute.
- *
- * @property {$.oNode} node The oNode this attribute belongs to.
- * @property {attr} attributeObject The internal harmony Attribute Object.
- * @property {string} keyword The full keyword describing this attribute, including parent attributes separated with a "." (always in lower case)
- * @property {string} shortKeyword The part of the keyword specific to this attribute, without the parent attributes. (always in lower case)
- * @property {$.oAttribute} parentAttribute The parent oAttribute object
- * @property {$.oAttribute[]} subAttributes The subattributes of this attribute.
- * @example
- * // oAttribute objects can be grabbed from the node .attributes object with dot notation, by calling the attribute keyword in lowercase.
- *
- * var myNode = $.scn.getSelectedNodes()[0]; // grab the first selected node
- * var Xattribute = myNode.attributes.position.x; // gets the position.x attribute of the node if it has it (for example, PEG nodes have it)
- *
- * var Xcolumn = Xattribute.column; // retrieve the linked column to the element (The object that holds the animation)
- *
- * Xattribute.setValue(5, 5); // sets the value to 5 at frame 5
- *
- * // attribute values can also be set directly on the node when not animated:
- * myNode.position.x = 5;
- *
- */
-$.oAttribute = function( oNodeObject, attributeObject, parentAttribute ){
- this._type = "attribute";
-
- this.node = oNodeObject;
- this.attributeObject = attributeObject;
-
- this._shortKeyword = attributeObject.keyword();
-
- if( attributeObject.fullKeyword ){
- this._keyword = attributeObject.fullKeyword();
- }else{
- this._keyword = (parentAttribute?(parentAttribute._keyword+"."):"") + this._shortKeyword;
- }
-
- this.parentAttribute = parentAttribute; // only for subAttributes
-
- // recursively add all subattributes as properties on the object
- this.createSubAttributes(attributeObject);
-}
-
-
-/**
- * Private function to create subAttributes in an oAttribute object at initialisation.
- * @private
- * @return {void} Nothing returned.
- */
-$.oAttribute.prototype.createSubAttributes = function (attributeObject){
- var _subAttributes = [];
-
- // if harmony version supports getSubAttributes
- var _subAttributesList = [];
- if (attributeObject.getSubAttributes){
- _subAttributesList = attributeObject.getSubAttributes();
- }else{
- var sub_attrs = node.getAttrList( this.node.path, 1, this._keyword );
-
- if( sub_attrs && sub_attrs.length>0 ){
- _subAttributesList = sub_attrs;
- }
- }
-
- for (var i in _subAttributesList){
- var _subAttribute = new this.$.oAttribute( this.node, _subAttributesList[i], this );
- var _keyword = _subAttribute.shortKeyword;
-
- // creating a property on the attribute object with the subattribute name to access it
- this[_keyword] = _subAttribute;
- _subAttributes.push(_subAttribute)
- }
-
- // subAttributes is made available as an array for more formal access
- this.subAttributes = _subAttributes;
-}
-
-
-/**
- * Private function to add utility to subattributes on older versions of Harmony.
- * @private
- * @deprecated
- * @return {void} Nothing returned.
- */
-$.oAttribute.prototype.getSubAttributes_oldVersion = function (){
- var sub_attrs = [];
-
- switch( this.type ){
- case "POSITION_3D" :
- //hard coded subAttr handler for POSITION_3D in older versions of Harmony.
- sub_attrs = [ 'SEPARATE', 'X', 'Y', 'Z'];
- break
- case "ROTATION_3D" :
- sub_attrs = [ 'SEPARATE', 'ANGLEX', 'ANGLEY', 'ANGLEZ', "QUATERNIONPATH" ];
- break
- case "SCALE_3D" :
- sub_attrs = [ 'SEPARATE', 'IN_FIELDS', 'XY', 'X', 'Y', 'Z' ];
- break
- case "DRAWING" :
- sub_attrs = [ 'ELEMENT', 'ELEMENT_MODE', 'CUSTOM_NAME'];
- break
- case "ELEMENT" :
- sub_attrs = [ 'LAYER' ]
- break
- case "CUSTOM_NAME" :
- sub_attrs = [ 'NAME', 'TIMING', 'EXTENSION', 'FIELD_CHART' ]
- default:
- break
- }
-
- var _node = this.node.path;
- var _keyword = this._keyword;
-
- sub_attrs = sub_attrs.map(function(x){return node.getAttr( _node, 1, _keyword+"."+x )})
-
- return sub_attrs;
-}
-
-
-/**
- * The display name of the attribute
- * @name $.oAttribute#name
- * @type {string}
- */
-Object.defineProperty($.oAttribute.prototype, 'name', {
- get: function(){
- return this.attributeObject.name();
- }
-})
-
-/**
- * The full keyword of the attribute.
- * @name $.oAttribute#keyword
- * @type {string}
- */
-Object.defineProperty($.oAttribute.prototype, 'keyword', {
- get : function(){
- // formatting the keyword for our purposes
- // hard coding a fix for 3DPath attribute name which starts with a number
- var _keyword = this._keyword.toLowerCase();
- if (_keyword == "3dpath") _keyword = "path3d";
- return _keyword;
- }
-});
-
-
-/**
- * The part of the attribute's keyword that is after the "." for subAttributes.
- * @name $.oAttribute#shortKeyword
- * @type {string}
- */
-Object.defineProperty($.oAttribute.prototype, 'shortKeyword', {
- get : function(){
- // formatting the keyword for our purposes
- // hard coding a fix for 3DPath attribute name which starts with a number
- var _keyword = this._shortKeyword.toLowerCase();
- if (_keyword == "3dpath") _keyword = "path3d";
- return _keyword;
- }
-});
-
-
-/**
- * The type of the attribute.
- * @name $.oAttribute#type
- * @type {string}
- */
-Object.defineProperty($.oAttribute.prototype, 'type', {
- get : function(){
- return this.attributeObject.typeName();
- }
-});
-
-/**
- * The column attached to the attribute.
- * @name $.oAttribute#column
- * @type {$.oColumn}
- * @example
-// link a new column to an attribute by setting this value:
-var myColumn = $.scn.addColumn("BEZIER");
-myNode.attributes.position.x.column = myColumn; // values contained in "myColumn" now define the animation of our peg's x position
-
-// to automatically create a column and link it to the attribute, use:
-myNode.attributes.position.x.addColumn(); // if the column exist already, it will just be returned.
-
-// to unlink a column, just set it to null/undefined:
-myNode.attributes.position.x.column = null; // values are no longer animated.
- */
-Object.defineProperty($.oAttribute.prototype, 'column', {
- get : function(){
- var _column = node.linkedColumn ( this.node.path, this._keyword );
- if( _column && _column.length ){
- return this.node.scene.$column( _column, this );
- }else{
- return null;
- }
- },
-
- set : function(columnObject){
- // unlink if provided with null value or empty string
- if (!columnObject){
- node.unlinkAttr(this.node.path, this._keyword);
- }else{
- node.linkAttr(this.node.path, this._keyword, columnObject.uniqueName);
- columnObject.attributeObject = this;
- // TODO: transfer current value of attribute to a first key on the column if column is empty
- }
- }
-});
-
-
- /**
- * The frames array holding the values of the animation. Starts at 1, as array indexes correspond to frame numbers.
- * @name $.oAttribute#frames
- * @type {$.oFrame[]}
- */
-Object.defineProperty($.oAttribute.prototype, 'frames', {
- get : function(){
- var _column = this.column
- if (_column != null){
- return _column.frames;
- }else{
- //Need a method to get frames of non-column values. Local Values.
- return [ new this.$.oFrame( 1, this, false ) ];
- }
- },
-
- set : function(){
- throw "Not implemented."
- }
-});
-
-
-/**
- * An array of only the keyframes (frames with a set value) of the animation.
- * @name $.oAttribute#keyframes
- * @type {$.oFrame[]}
- */
-// MCNote: I would prefer if this could remain getKeyFrames()
-Object.defineProperty($.oAttribute.prototype, 'keyframes', {
- get : function(){
- var col = this.column;
- var frames = this.frames;
-
- if( !col ){
- return frames[1];
- }
-
- return this.column.keyframes;
- },
-
- set : function(){
- throw "Not implemented."
- }
-});
-
-/**
- * WIP.
- * @name $.oAttribute#useSeparate
- * @type {bool}
- * @private
- */
-//CF Note: Not sure if this should be a general attribute, or a subattribute.
-Object.defineProperty($.oAttribute.prototype, "useSeparate", {
- get : function(){
- // TODO
- throw new Error("not yet implemented");
- },
-
- set : function( _value ){
- // TODO: when swapping from one to the other, copy key values and link new columns if missing
- throw new Error("not yet implemented");
- }
-});
-
-
-/**
- * Returns the default value of the attribute for most keywords
- * @name $.oAttribute#defaultValue
- * @type {object}
- * @todo switch the implementation to types?
- * @example
- * // to reset an attribute to its default value:
- * // (mostly used for position/angle/skew parameters of pegs and drawing nodes)
- * var myAttribute = $.scn.nodes[0].attributes.position.x;
- *
- * myAttribute.setValue(myAttribute.defaultValue);
- */
-Object.defineProperty($.oAttribute.prototype, "defaultValue", {
- get : function(){
- // TODO: we could use this to reset bones/deformers to their rest states
- var _keyword = this._keyword;
-
- switch (_keyword){
- case "OFFSET.X" :
- case "OFFSET.Y" :
- case "OFFSET.Z" :
-
- case "POSITION.X" :
- case "POSITION.Y" :
- case "POSITION.Z" :
-
- case "PIVOT.X":
- case "PIVOT.Y":
- case "PIVOT.Z":
-
- case "ROTATION.ANGLEX":
- case "ROTATION.ANGLEY":
- case "ROTATION.ANGLEZ":
-
- case "ANGLE":
- case "SKEW":
-
- case "SPLINE_OFFSET.X":
- case "SPLINE_OFFSET.Y":
- case "SPLINE_OFFSET.Z":
-
- return 0;
-
- case "SCALE.X" :
- case "SCALE.Y" :
- case "SCALE.Z" :
- return 1;
-
- case "OPACITY" :
- return 100;
-
- case "COLOR" :
- return new this.$.oColorValue();
-
- case "OFFSET.3DPATH":
- // pseudo oPathPoint
- // CFNote: is this supposed to be an object?
- // this is a fake object value that can be easily checked with a "==" operator.
- // oPathPoint will be converted to string for checking, and have the same format.
- // I made this to check if the value is default but I guess it's not ideal for assigning a default value, so maybe we should change it.
- return "{x:0, y:0, z:0}";
-
- default:
- return null; // for attributes that don't have a default value, we return null
- }
- }
-});
-
-
-// $.oAttribute Class methods
-
-/**
- * Provides the keyframes of the attribute.
- * @return {$.oFrame[]} The filtered keyframes.
- */
-$.oAttribute.prototype.getKeyframes = function(){
- var _frames = this.frames;
- _frames = _frames.filter(function(x){return x.isKeyframe});
- return _frames;
-}
-
-
-/**
- * Provides the keyframes of the attribute.
- * @return {$.oFrame[]} The filtered keyframes.
- * @deprecated For case consistency, keyframe will never have a capital F
- */
-$.oAttribute.prototype.getKeyFrames = function(){
- this.$.debug("oAttribute.getKeyFrames is deprecated. Use oAttribute.getKeyframes instead.", this.$.DEBUG_LEVEL.ERROR);
- var _frames = this.frames;
- _frames = _frames.filter(function(x){return x.isKeyframe});
- return _frames;
-}
-
-
-/**
- * Recursively get all the columns linked to the attribute and its subattributes
- * @return {$.oColumn[]} the list of columns linked to the subattributes
- */
-$.oAttribute.prototype.getLinkedColumns = function(){
- var _columns = [];
- var _subAttributes = this.subAttributes;
- var _ownColumn = this.column;
- if (_ownColumn != null) _columns.push(_ownColumn);
-
- for (var i=0; i<_subAttributes.length; i++) {
- _columns = _columns.concat(_subAttributes[i].getLinkedColumns());
- }
-
- return _columns;
-}
-
-
-/**
- * Recursively sets an attribute to the same value as another. Both must have the same keyword.
- * @param {bool} [duplicateColumns=false] In the case that the attribute has a column, whether to duplicate the column before linking
- * @private
- */
-$.oAttribute.prototype.setToAttributeValue = function(attributeToCopy, duplicateColumns){
- if (typeof duplicateColumns === 'undefined') var duplicateColumns = false;
-
- if (this.keyword !== attributeToCopy.keyword) return;
- var _subAttributes = this.subAttributes;
-
- var _column = attributeToCopy.column;
- if (_column == null) {
- var value = attributeToCopy.getValue();
- this.setValue(value);
- }else{
- if (duplicateColumns) var _column = _column.duplicate(this);
- this.column = _column;
- }
-
- var _subAttributesToCopy = attributeToCopy.subAttributes;
- for (var i=0; i<_subAttributes.length; i++){
- _subAttributes[i].setToAttributeValue(_subAttributesToCopy[i], duplicateColumns);
- }
-}
-
-
-//CFNote: Is it worth having a getValueType?
-/**
- * Gets the value of the attribute at the given frame.
- * @param {int} frame The frame at which to get the value; if not set, assumes 1
- *
- * @return {object} The value of the attribute in the native format of that attribute (contextual to the attribute).
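- * @example
- * // a hedged sketch (the node path is made up): read the x position of a peg at frame 1
- * var _x = $.scn.$node("Top/MyPeg").attributes.position.x.getValue(1);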
- */
-$.oAttribute.prototype.getValue = function (frame) {
- if (typeof frame === 'undefined') var frame = 1;
- this.$.debug('getting value of frame :'+frame+' of attribute: '+this._keyword+' of node '+this.node+' - type '+this.type, this.$.DEBUG_LEVEL.LOG)
-
- var _attr = this.attributeObject;
- var _type = this.type;
- var _value;
- var _column = this.column;
-
- // handling conversion of all return types into our own types
- switch (_type){
- case 'BOOL':
- _value = _attr.boolValueAt(frame)
- break;
-
- case 'INT':
- _value = _attr.intValueAt(frame)
- break;
-
- case 'DOUBLE':
- case 'DOUBLEVB':
- _value = _attr.doubleValueAt(frame)
- break;
-
- case 'STRING':
- _value = _attr.textValueAt(frame)
- break;
-
- case 'COLOR':
- _value = new this.$.oColorValue(_attr.colorValueAt(frame))
- break;
-
- case 'POSITION_2D':
- _value = _attr.pos2dValueAt(frame)
- _value = new this.$.oPoint(_value.x, _value.y)
- break;
-
- case 'POSITION_3D':
- _value = _attr.pos3dValueAt(frame)
- _value = new this.$.oPoint(_value.x, _value.y, _value.z)
- break;
-
- case 'SCALE_3D':
- _value = _attr.pos3dValueAt(frame)
- _value = new this.$.oPoint(_value.x, _value.y, _value.z)
- break;
-
- case 'PATH_3D':
- _attr = this.parentAttribute.attributeObject;
- var _frame = _column?(new this.$.oFrame(frame, _column)):(new this.$.oFrame(frame, _attr));
- if(_column && _frame.isKeyframe){
- _value = new this.$.oPathPoint(_column, _frame);
- }else{
- _value = _attr.pos3dValueAt(frame);
- }
- break;
-
- /*case 'DRAWING':
- // override with returning an oElement object
- this.$.debug( "DRAWING: " + this.keyword , this.$.DEBUG_LEVEL.LOG);
-
- value = _column.element;
- break;*/
-
- case 'ELEMENT':
- // an element always has a column, so we'll fetch it from there
- _value = column.getEntry(_column.uniqueName, 1, frame);
-
- // Convert to an instance of oDrawing, with a safety in case of psd import
-      var _drawing = _column.element.getDrawingByName(_value);
- if (_drawing) _value = _drawing;
- break;
-
- // TODO: How does QUATERNION_PATH work? subcolumns I imagine
- // TODO: How to get types SCALE_3D, ROTATION_3D, DRAWING, GENERIC_ENUM? -> maybe we don't need to, they don't have intrinsic values
-
- default:
- // enums, etc
- _value = _attr.textValueAt(frame);
-
- // in case of subattributes, create a fake string that can have properties so we can create getter setters on it for its subattrs
- if ( _attr.hasSubAttributes && _attr.hasSubAttributes() ){
- _value = { value:_value };
- _value.toString = function(){ return this.value };
- }else{
- var sub_attrs = node.getAttrList( this.node.path, 1, this._keyword );
- if( sub_attrs && sub_attrs.length>0 ){
- _value = { value:_value };
- _value.toString = function(){ return this.value };
- }
- }
- }
-
- return _value;
-}
-
-
-/**
- * Sets the value of the attribute at the given frame.
- * @param {string} value The value to set on the attribute.
- * @param {int} [frame=1] The frame at which to set the value, if not set, assumes 1
- */
-$.oAttribute.prototype.setValue = function (value, frame) {
- var _attr = this.attributeObject;
- var _column = this.column;
- var _type = this.type;
- var _animate = false;
-
- if (!frame){
- // we don't animate
- var frame = 1;
- }else if (!_column){
- // generate a new column to be able to animate
- _column = this.addColumn();
- }
-
- if( _column ){
- _animate = true;
- }
-
- try{
- this.$.debug("setting attr "+this._keyword+" (type : "+this.type+") on node "+this.node+" to value "+JSON.stringify(value)+" at frame "+frame, this.$.DEBUG_LEVEL.LOG)
- }catch(err){
- this.$.debug("setting attr "+this._keyword+" at frame "+frame, this.$.DEBUG_LEVEL.LOG)
- };
-
- switch(_type){
- // TODO: sanitize input
- case "COLOR" :
- // doesn't work for burnin because it has color.Red, color.green etc and not .r .g ...
- value = (value instanceof this.$.oColorValue)?value: new this.$.oColorValue(value);
- value = ColorRGBA(value.r, value.g, value.b, value.a);
- _animate ? _attr.setValueAt(value, frame) : _attr.setValue(value);
- break;
-
- case "GENERIC_ENUM" :
- node.setTextAttr(this.node.path, this._keyword, frame, value);
- break;
-
- case "PATH_3D" :
- // check if frame is tied to a column or an attribute
- var _frame = _column?(new this.$.oFrame(frame, this.column)):(new this.$.oFrame(frame, _attr));
- if (_column){
- if (!_frame.isKeyframe) _frame.isKeyframe = true;
- var _point = new this.$.oPathPoint (this.column, _frame);
- _point.set(value);
- }else{
- // TODO: create keyframe?
- this.parentAttribute.setValue(value);
- }
- break;
-
- case "POSITION_2D":
- value = Point2d(value.x, value.y);
- _animate ? _attr.setValueAt(value, frame) : _attr.setValue(value);
- break;
-
- case "POSITION_3D":
- value = Point3d(value.x, value.y, value.z);
- _animate ? _attr.setValueAt(value, frame) : _attr.setValue(value);
- break;
-
- case "ELEMENT" :
- _column = this.column;
- value = (value instanceof this.$.oDrawing) ? value.name : value;
- column.setEntry(_column.uniqueName, 1, frame, value+"");
- break;
-
- case "QUATERNIONPATH" :
- // set quaternion paths as textattr until a better way is found
-
- default :
- try{
- _animate ? _attr.setValueAt( value, frame ) : _attr.setValue( value );
- }catch(err){
- this.$.debug("error setting attr "+this._keyword+" value "+value+": "+err, this.$.DEBUG_LEVEL.DEBUG);
- this.$.debug("setting text attr "+this._keyword+" value "+value+" as textAttr ", this.$.DEBUG_LEVEL.ERROR);
- node.setTextAttr( this.node.path, this._keyword, frame, value );
- }
- }
-}
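
// Usage sketch for the getValue()/setValue() pair above (illustrative node path and values;
// assumes the openHarmony "$" DOM is loaded and a scene is open):
var _peg = $.scn.getNodeByPath("Top/MyPeg");   // hypothetical node
var _xAttr = _peg.attributes.position.x;
_xAttr.setValue(2.5);                          // no frame given: static value, no column created
_xAttr.setValue(5, 12);                        // an explicit frame creates a column (addColumn) and keys frame 12
$.log(_xAttr.getValue(12));                    // -> 5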
-
-
-/**
- * Adds a column with a default name, based on the attribute type.
- * If a column already exists, it returns it.
- * @returns {$.oColumn} the created column
- */
-$.oAttribute.prototype.addColumn = function(){
- var _column = this.column;
- if (_column) return _column;
-
- if (this.hasSubAttributes){
- throw new Error("Can't create columns for attribute "+this.keyword+", column must be created for its subattributes.");
- }
-
- var _type = this.type;
- var _columnType = "";
- var _columnName = this.node.name+": "+this.name.replace(/\s/g, "_");
-
- switch(_type){
- case 'INT':
- case 'DOUBLE':
- case 'DOUBLEVB':
- _columnType = "BEZIER";
- break;
-
- case "QUATERNIONPATH" :
- _columnName = "QUARTERNION";
- break;
-
- case "PATH_3D" :
- _columnName = "3DPATH";
- break;
-
- case "ELEMENT" :
- _columnType = "DRAWING";
- _columnName = this.node.name;
- break;
-
- default :
- throw new Error("Can't create columns for attribute "+this.keyword+", not supported by attribute type '"+_type+"'");
- }
-
- var _column = this.$.scn.addColumn(_columnType, _columnName);
- this.column = _column;
-
- if (!this.column) {
- _column.remove();
- throw new Error("Can't create columns for attribute "+this.keyword+", animation not supported.");
- }
-
- return this.column;
-}
-
-
-/**
- * Gets the value of the attribute at the given frame.
- * @param {int} frame The frame at which to get the value, if not set, assumes 1
- * @deprecated use oAttribute.getValue(frame) instead (see: function names as verbs)
- * @return {object} The value of the attribute in the native format of that attribute (contextual to the attribute).
- */
-$.oAttribute.prototype.value = function(frame){
- return this.getValue( frame );
-}
-
-
-/**
- * Represents an oAttribute object in string form
- * @private
- * @returns {string}
- */
-$.oAttribute.prototype.toString = function(){
- return "[object $.oAttribute '"+this.keyword+(this.subAttributes.length?"' subAttributes: "+this.subAttributes.map(function(x){return x.shortKeyword}):"")+"]";
-}
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_backdrop.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_backdrop.js
deleted file mode 100644
index 1d359f93c4..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_backdrop.js
+++ /dev/null
@@ -1,415 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API which must remains available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oBackdrop class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oBackdrop class.
- * @constructor
- * @classdesc The $.oBackdrop Class represents a backdrop in the node view, and allows users to add, remove and modify existing Backdrops. Accessing these functions is done through the oGroupNode class.
- * @param {string} groupPath The path to the object in which this backdrop is placed.
- * @param {backdropObject} backdropObject The harmony-internal backdrop object associated with this oBackdrop.
- * @example
- * function createColoredBackdrop(){
- * // This script will prompt for a color and create a backdrop around the selection
- * $.beginUndo()
- *
- * var doc = $.scn; // grab the scene
- * var nodes = doc.selectedNodes; // grab the selection
- *
- * if(!nodes) return // exit the function if no nodes are selected
- *
- * var color = pickColor(); // prompt for color
- *
- * var group = doc.root // get the group to add the backdrop to
- * var backdrop = group.addBackdropToNodes(nodes, "BackDrop", "", color)
- *
- * $.endUndo();
- *
- * // function to get the color chosen by the user
- * function pickColor(){
- * var d = new QColorDialog;
- * d.exec();
- * var color = d.selectedColor();
- * return new $.oColorValue({r:color.red(), g:color.green(), b:color.blue(), a:color.alpha()})
- * }
- * }
- */
-$.oBackdrop = function( groupPath, backdropObject ){
- this.group = ( groupPath instanceof this.$.oGroupNode )? groupPath.path: groupPath;
- this.backdropObject = backdropObject;
-}
-
-
-/**
- * The index of this backdrop in the current group.
- * @name $.oBackdrop#index
- * @type {int}
- */
-Object.defineProperty($.oBackdrop.prototype, 'index', {
- get : function(){
- var _groupBackdrops = Backdrop.backdrops(this.group).map(function(x){return x.title.text})
- return _groupBackdrops.indexOf(this.title)
- }
-})
-
-
-/**
- * The title of the backdrop.
- * @name $.oBackdrop#title
- * @type {string}
- */
-Object.defineProperty($.oBackdrop.prototype, 'title', {
- get : function(){
- var _title = this.backdropObject.title.text;
- return _title;
- },
-
- set : function(newTitle){
- var _backdrops = Backdrop.backdrops(this.group);
-
- // incrementing to prevent two backdrops to have the same title
- var names = _backdrops.map(function(x){return x.title.text})
- var count = 0;
- var title = newTitle
-
- while (names.indexOf(title) != -1){
- count++;
- title = newTitle+"_"+count;
- }
- newTitle = title;
-
- var _index = this.index;
-
- _backdrops[_index].title.text = newTitle;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The body text of the backdrop.
- * @name $.oBackdrop#body
- * @type {string}
- */
-Object.defineProperty($.oBackdrop.prototype, 'body', {
- get : function(){
- var _title = this.backdropObject.description.text;
- return _title;
- },
-
- set : function(newBody){
- var _backdrops = Backdrop.backdrops(this.group);
-
- var _index = this.index;
- _backdrops[_index].description.text = newBody;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The title font of the backdrop in form { family:"familyName", "size":int, "color": oColorValue }
- * @name $.oBackdrop#titleFont
- * @type {object}
- */
-Object.defineProperty($.oBackdrop.prototype, 'titleFont', {
- get : function(){
-    var _color = new this.$.oColorValue();
-    _color.parseColorFromInt(this.backdropObject.title.color);
-    var _font = {family : this.backdropObject.title.font,
-                 size : this.backdropObject.title.size,
-                 color : _color}
- return _font;
- },
-
- set : function(newFont){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].title.font = newFont.family;
- _backdrops[_index].title.size = newFont.size;
- _backdrops[_index].title.color = newFont.color.toInt();
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The body font of the backdrop in form { family:"familyName", "size":int, "color": oColorValue }
- * @name $.oBackdrop#bodyFont
- * @type {object}
- */
-Object.defineProperty($.oBackdrop.prototype, 'bodyFont', {
- get : function(){
-    var _color = new this.$.oColorValue();
-    _color.parseColorFromInt(this.backdropObject.description.color);
-    var _font = {family : this.backdropObject.description.font,
-                 size : this.backdropObject.description.size,
-                 color : _color}
- return _font;
- },
-
- set : function(newFont){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
-    _backdrops[_index].description.font = newFont.family;
-    _backdrops[_index].description.size = newFont.size;
-    _backdrops[_index].description.color = newFont.color.toInt();
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The nodes contained within this backdrop
- * @name $.oBackdrop#parent
- * @type {$.oNode[]}
- * @readonly
- */
- Object.defineProperty($.oBackdrop.prototype, 'parent', {
- get : function(){
- if (!this.hasOwnProperty("_parent")){
- this._parent = this.$.scn.getNodeByPath(this.group);
- }
- return this._parent
- }
-})
-
-
-/**
- * The nodes contained within this backdrop
- * @name $.oBackdrop#nodes
- * @type {$.oNode[]}
- * @readonly
- */
-Object.defineProperty($.oBackdrop.prototype, 'nodes', {
- get : function(){
- var _nodes = this.parent.nodes;
- var _bounds = this.bounds;
- _nodes = _nodes.filter(function(x){
- return _bounds.contains(x.bounds);
- })
-
- return _nodes;
- }
-})
-
-/**
- * The position of the backdrop on the horizontal axis.
- * @name $.oBackdrop#x
- * @type {float}
- */
-Object.defineProperty($.oBackdrop.prototype, 'x', {
- get : function(){
- var _x = this.backdropObject.position.x;
- return _x;
- },
-
- set : function(newX){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].position.x = newX;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The position of the backdrop on the vertical axis.
- * @name $.oBackdrop#y
- * @type {float}
- */
-Object.defineProperty($.oBackdrop.prototype, 'y', {
- get : function(){
- var _y = this.backdropObject.position.y;
- return _y;
- },
-
- set : function(newY){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].position.y = newY;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The width of the backdrop.
- * @name $.oBackdrop#width
- * @type {float}
- */
-Object.defineProperty($.oBackdrop.prototype, 'width', {
- get : function(){
- var _width = this.backdropObject.position.w;
- return _width;
- },
-
- set : function(newWidth){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].position.w = newWidth;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The height of the backdrop.
- * @name $.oBackdrop#height
- * @memberof $.oBackdrop#
- * @type {float}
- */
-Object.defineProperty($.oBackdrop.prototype, 'height', {
- get : function(){
- var _height = this.backdropObject.position.h;
- return _height;
- },
-
- set : function(newHeight){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].position.h = newHeight;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The position of the backdrop.
- * @name $.oBackdrop#position
- * @type {oPoint}
- */
-Object.defineProperty($.oBackdrop.prototype, 'position', {
- get : function(){
- var _position = new oPoint(this.x, this.y, this.index)
- return _position;
- },
-
- set : function(newPos){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
- _backdrops[_index].position.x = newPos.x;
- _backdrops[_index].position.y = newPos.y;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The bounds of the backdrop.
- * @name $.oBackdrop#bounds
- * @type {oBox}
- */
-Object.defineProperty($.oBackdrop.prototype, 'bounds', {
- get : function(){
- var _box = new oBox(this.x, this.y, this.width+this.x, this.height+this.y)
- return _box;
- },
-
- set : function(newBounds){
- var _backdrops = Backdrop.backdrops(this.group);
- var _index = this.index;
-
-    _backdrops[_index].position.x = newBounds.left;
-    _backdrops[_index].position.y = newBounds.top;
- _backdrops[_index].position.w = newBounds.width;
- _backdrops[_index].position.h = newBounds.height;
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
-
-
-/**
- * The color of the backdrop.
- * @name $.oBackdrop#color
- * @type {oColorValue}
- */
-Object.defineProperty($.oBackdrop.prototype, 'color', {
- get : function(){
- var _color = this.backdropObject.color;
- // TODO: get the rgba values from the int
- return _color;
- },
-
- set : function(newOColorValue){
- var _color = new oColorValue(newOColorValue);
- var _index = this.index;
-
- var _backdrops = Backdrop.backdrops(this.group);
- _backdrops[_index].color = _color.toInt();
-
- this.backdropObject = _backdrops[_index];
- Backdrop.setBackdrops(this.group, _backdrops);
- }
-})
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_color.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_color.js
deleted file mode 100644
index ff06688e66..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_color.js
+++ /dev/null
@@ -1,661 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API which must remains available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oColorValue class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-/**
- * This class holds a color value. It can be used to set color attributes to a specific value and to convert colors between different formats such as hex strings, RGBA decompositions, as well as HSL values.
- * @constructor
- * @classdesc Constructor for the $.oColorValue Class.
- * @param {string/object} colorValue Hex string value, or object in form {rgba}
- *
- * @property {int} r The int value of the red component.
- * @property {int} g The int value of the green component.
- * @property {int} b The int value of the blue component.
- * @property {int} a The int value of the alpha component.
- * @example
- * // initialise the class to start setting up attributes and making conversions by creating a new instance
- *
- * var myColor = new $.oColorValue("#336600ff");
- * $.log(myColor.r+" "+myColor.g+" "+myColor.b+" "+myColor.a) // you can then access each component of the color
- *
- * var myBackdrop = $.scn.root.addBackdrop("Backdrop")
- * myBackdrop.color = myColor // can be used to set the color of a backdrop
- *
- */
-$.oColorValue = function( colorValue ){
- if (typeof colorValue === 'undefined') var colorValue = "#000000ff";
-
- this.r = 0;
- this.g = 0;
- this.b = 0;
- this.a = 255;
-
- //Special case in which RGBA values are defined directly.
- switch( arguments.length ){
- case 4:
- this.a = ( (typeof arguments[3]) == "number" ) ? arguments[3] : 0;
- case 3:
- this.r = ( (typeof arguments[0]) == "number" ) ? arguments[0] : 0;
- this.g = ( (typeof arguments[1]) == "number" ) ? arguments[1] : 0;
- this.b = ( (typeof arguments[2]) == "number" ) ? arguments[2] : 0;
- return;
- default:
- }
-
- if (typeof colorValue === 'string'){
- this.fromColorString(colorValue);
- }else{
- if (typeof colorValue.r === 'undefined') colorValue.r = 0;
- if (typeof colorValue.g === 'undefined') colorValue.g = 0;
- if (typeof colorValue.b === 'undefined') colorValue.b = 0;
- if (typeof colorValue.a === 'undefined') colorValue.a = 255;
-
- this.r = colorValue.r;
- this.g = colorValue.g;
- this.b = colorValue.b;
- this.a = colorValue.a;
- }
-}
-
-
-/**
- * Creates an int from the color value, as used for backdrop colors.
- * @return: {int} The color packed as (ALPHA << 24) | (RED << 16) | (GREEN << 8) | BLUE
- */
-$.oColorValue.prototype.toInt = function (){
- return ((this.a & 0xff) << 24) | ((this.r & 0xff) << 16) | ((this.g & 0xff) << 8) | (this.b & 0xff);
-}
-
-
-/**
- * The colour value represented as a string.
- * @return: {string} RGBA components in a string in format #RRGGBBAA
- */
-$.oColorValue.prototype.toString = function (){
- var _hex = "#";
-
- var r = ("00"+this.r.toString(16)).slice(-2);
- var g = ("00"+this.g.toString(16)).slice(-2);
- var b = ("00"+this.b.toString(16)).slice(-2);
- var a = ("00"+this.a.toString(16)).slice(-2);
-
- _hex += r + g + b + a;
-
- return _hex;
-}
-
-/**
- * The colour value represented as a string.
- * @return: {string} RGBA components in a string in format #RRGGBBAA
- */
-$.oColorValue.prototype.toHex = function (){
- return this.toString();
-}
-
-/**
- * Ingest a hex string in form #RRGGBBAA to define the colour.
- * @param {string} hexString The colour in form #RRGGBBAA
- */
-$.oColorValue.prototype.fromColorString = function (hexString){
- hexString = hexString.replace("#","");
- if (hexString.length == 6) hexString += "ff";
- if (hexString.length != 8) throw new Error("incorrect color string format");
-
- this.$.debug( "HEX : " + hexString, this.$.DEBUG_LEVEL.LOG);
-
- this.r = parseInt(hexString.slice(0,2), 16);
- this.g = parseInt(hexString.slice(2,4), 16);
- this.b = parseInt(hexString.slice(4,6), 16);
- this.a = parseInt(hexString.slice(6,8), 16);
-}
-
-
-/**
- * Uses a color integer (used in backdrops) and parses the INT; applies the RGBA components of the INT to the oColorValue
- * @param { int } colorInt 24 bit-shifted integer containing RGBA values
- */
-$.oColorValue.prototype.parseColorFromInt = function(colorInt){
- this.r = colorInt >> 16 & 0xFF;
- this.g = colorInt >> 8 & 0xFF;
- this.b = colorInt & 0xFF;
- this.a = colorInt >> 24 & 0xFF;
-}
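
// Round-trip sketch of the ARGB packing used by toInt() and parseColorFromInt() above
// (the colour value is illustrative):
var _col = new $.oColorValue("#336600ff");     // r:0x33 g:0x66 b:0x00 a:0xff
var _packed = _col.toInt();                    // (a<<24) | (r<<16) | (g<<8) | b
var _copy = new $.oColorValue();
_copy.parseColorFromInt(_packed);              // _copy now holds the same r, g, b and a
$.log(_copy.toString());                       // -> "#336600ff"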
-
-
-/**
- * Gets the color's HUE value.
- * @name $.oColorValue#h
- * @type {float}
- */
-Object.defineProperty($.oColorValue.prototype, 'h', {
- get : function(){
- var r = this.r;
- var g = this.g;
- var b = this.b;
-
- var cmin = Math.min(r,g,b);
- var cmax = Math.max(r,g,b);
- var delta = cmax - cmin;
- var h = 0;
- var s = 0;
- var l = 0;
-
- if (delta == 0){
- h = 0.0;
- // Red is max
- }else if (cmax == r){
- h = ((g - b) / delta) % 6.0;
- // Green is max
- }else if (cmax == g){
- h = (b - r) / delta + 2.0;
- // Blue is max
- }else{
- h = (r - g) / delta + 4.0;
- }
-
- h = Math.round(h * 60.0);
-
- //WRAP IN 360.
- if (h < 0){
- h += 360.0;
- }
-
- // // Calculate lightness
- // l = (cmax + cmin) / 2.0;
-
- // // Calculate saturation
- // s = delta == 0 ? 0 : delta / (1.0 - Math.abs(2.0 * l - 1.0));
-
- // s = Math.min( Math.abs(s)*100.0, 100.0 );
- // l = (Math.abs(l)/255.0)*100.0;
-
- return h;
- },
-
- set : function( new_h ){
- var h = Math.min( new_h, 360.0 );
- var s = Math.min( this.s, 100.0 )/100.0;
- var l = Math.min( this.l, 100.0 )/100.0;
-
- var c = (1.0 - Math.abs(2.0 * l - 1.0)) * s;
- var x = c * (1 - Math.abs((h / 60.0) % 2.0 - 1.0));
- var m = l - c/2.0;
- var r = 0.0;
- var g = 0.0;
- var b = 0.0;
-
- if (0.0 <= h && h < 60.0) {
- r = c; g = x; b = 0;
- } else if (60.0 <= h && h < 120.0) {
- r = x; g = c; b = 0;
- } else if (120.0 <= h && h < 180.0) {
- r = 0; g = c; b = x;
- } else if (180.0 <= h && h < 240.0) {
- r = 0; g = x; b = c;
- } else if (240.0 <= h && h < 300.0) {
- r = x; g = 0; b = c;
- } else if (300.0 <= h && h < 360.0) {
- r = c; g = 0; b = x;
- }
-
- this.r = (r + m) * 255.0;
- this.g = (g + m) * 255.0;
- this.b = (b + m) * 255.0;
- }
-});
-
-/**
- * Gets the color's SATURATION value.
- * @name $.oColorValue#s
- * @type {float}
- */
-Object.defineProperty($.oColorValue.prototype, 's', {
- get : function(){
- var r = this.r;
- var g = this.g;
- var b = this.b;
-
- var cmin = Math.min(r,g,b);
- var cmax = Math.max(r,g,b);
- var delta = cmax - cmin;
- var s = 0;
- var l = 0;
-
- // Calculate lightness
- l = (cmax + cmin) / 2.0;
- s = delta == 0 ? 0 : delta / (1.0 - Math.abs(2.0 * l - 1.0));
-
- // Calculate saturation
- s = Math.min( Math.abs(s)*100.0, 100.0 );
-
- return s;
- },
-
- set : function( new_s ){
- var h = Math.min( this.h, 360.0 );
- var s = Math.min( new_s, 100.0 )/100.0;
- var l = Math.min( this.l, 100.0 )/100.0;
-
- var c = (1.0 - Math.abs(2.0 * l - 1.0)) * s;
- var x = c * (1 - Math.abs((h / 60.0) % 2.0 - 1.0));
- var m = l - c/2.0;
- var r = 0.0;
- var g = 0.0;
- var b = 0.0;
-
- if (0.0 <= h && h < 60.0) {
- r = c; g = x; b = 0;
- } else if (60.0 <= h && h < 120.0) {
- r = x; g = c; b = 0;
- } else if (120.0 <= h && h < 180.0) {
- r = 0; g = c; b = x;
- } else if (180.0 <= h && h < 240.0) {
- r = 0; g = x; b = c;
- } else if (240.0 <= h && h < 300.0) {
- r = x; g = 0; b = c;
- } else if (300.0 <= h && h < 360.0) {
- r = c; g = 0; b = x;
- }
-
- this.r = (r + m) * 255.0;
- this.g = (g + m) * 255.0;
- this.b = (b + m) * 255.0;
- }
-});
-
-/**
- * Gets the color's LIGHTNESS value.
- * @name $.oColorValue#l
- * @type {float}
- */
-Object.defineProperty($.oColorValue.prototype, 'l', {
- get : function(){
- var r = this.r;
- var g = this.g;
- var b = this.b;
-
- var cmin = Math.min(r,g,b);
- var cmax = Math.max(r,g,b);
- var delta = cmax - cmin;
- var s = 0;
- var l = 0;
-
-
- // Calculate lightness
- l = (cmax + cmin) / 2.0;
- l = (Math.abs(l)/255.0)*100.0;
- return l;
- },
-
- set : function( new_l ){
- var h = Math.min( this.h, 360.0 );
- var s = Math.min( this.s, 100.0 )/100.0;
- var l = Math.min( new_l, 100.0 )/100.0;
-
- var c = (1.0 - Math.abs(2.0 * l - 1.0)) * s;
- var x = c * (1 - Math.abs((h / 60.0) % 2.0 - 1.0));
- var m = l - c/2.0;
- var r = 0.0;
- var g = 0.0;
- var b = 0.0;
-
- if (0.0 <= h && h < 60.0) {
- r = c; g = x; b = 0;
- } else if (60.0 <= h && h < 120.0) {
- r = x; g = c; b = 0;
- } else if (120.0 <= h && h < 180.0) {
- r = 0; g = c; b = x;
- } else if (180.0 <= h && h < 240.0) {
- r = 0; g = x; b = c;
- } else if (240.0 <= h && h < 300.0) {
- r = x; g = 0; b = c;
- } else if (300.0 <= h && h < 360.0) {
- r = c; g = 0; b = x;
- }
-
- this.r = (r + m) * 255.0;
- this.g = (g + m) * 255.0;
- this.b = (b + m) * 255.0;
- }
-});
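
// Sketch of the h/s/l accessors above: they are computed from r, g and b on read, and
// writing one of them rebuilds r, g and b (values are illustrative):
var _red = new $.oColorValue(255, 0, 0, 255);  // pure red -> h:0, s:100, l:50
_red.h = 120;                                  // rotates the hue: r, g, b become 0, 255, 0 (green)
$.log(_red.h + " " + _red.s + " " + _red.l);   // -> "120 100 50"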
-
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oColor class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-// oPalette constructor
-
-/**
- * The base class for the $.oColor.
- * @constructor
- * @classdesc $.oColor Base Class
- * @param {$.oPalette} oPaletteObject The palette to which the color belongs.
- * @param {int} index The index of the color in the palette.
- *
- * @property {$.oPalette} palette The palette to which the color belongs.
- */
-$.oColor = function( oPaletteObject, index ){
- // We don't use id in the constructor as multiple colors with the same id can exist in the same palette.
- this._type = "color";
-
- this.palette = oPaletteObject;
- this._index = index;
-}
-
-// $.oColor Object Properties
-
-/**
- * The Harmony color object.
- * @name $.oColor#colorObject
- * @type {BaseColor}
- */
-Object.defineProperty($.oColor.prototype, 'colorObject', {
- get : function(){
- return this.palette.paletteObject.getColorByIndex(this._index);
- }
-});
-
-
-
-/**
- * The name of the color.
- * @name $.oColor#name
- * @type {string}
- */
-Object.defineProperty($.oColor.prototype, 'name', {
- get : function(){
- var _color = this.colorObject;
- return _color.name;
- },
-
- set : function(newName){
- var _color = this.colorObject;
- _color.setName(newName);
- }
-});
-
-
-/**
- * The id of the color.
- * @name $.oColor#id
- * @type {string}
- */
-Object.defineProperty($.oColor.prototype, 'id', {
- get : function(){
- var _color = this.colorObject;
- return _color.id
- },
-
- set : function(newId){
- // TODO: figure out a way to change id? Create a new color with specific id in the palette?
- throw new Error("setting oColor.id Not yet implemented");
- }
-});
-
-
-/**
- * The index of the color.
- * @name $.oColor#index
- * @type {int}
- */
-Object.defineProperty($.oColor.prototype, 'index', {
- get : function(){
- return this._index;
- },
-
- set : function(newIndex){
- var _color = this.palette.paletteObject.moveColor(this._index, newIndex);
- this._index = newIndex;
- }
-});
-
-
-/**
- * The type of the color.
- * @name $.oColor#type
- * @type {int}
- */
-Object.defineProperty($.oColor.prototype, 'type', {
- set : function(){
- throw new Error("setting oColor.type Not yet implemented.");
- },
-
- get : function(){
- var _color = this.colorObject;
- if (_color.isTexture) return "texture";
-
- switch (_color.colorType) {
- case PaletteObjectManager.Constants.ColorType.SOLID_COLOR:
- return "solid";
- case PaletteObjectManager.Constants.ColorType.LINEAR_GRADIENT :
- return "gradient";
- case PaletteObjectManager.Constants.ColorType.RADIAL_GRADIENT:
- return "radial gradient";
- default:
- }
- }
-});
-
-
-/**
- * Whether the color is selected.
- * @name $.oColor#selected
- * @type {bool}
- */
-Object.defineProperty($.oColor.prototype, 'selected', {
- get : function(){
- var _currentId = PaletteManager.getCurrentColorId()
- var _colors = this.palette.colors;
- var _ids = _colors.map(function(x){return x.id})
- return this._index == _ids.indexOf(_currentId);
- },
-
- set : function(isSelected){
- // TODO: find a way to work with index as more than one color can have the same id, also, can there be no selected color when removing selection?
- if (isSelected){
- var _id = this.id;
- PaletteManager.setCurrentColorById(_id);
- }
- }
-});
-
-
-/**
- * Takes a string or array of strings for gradients and filename for textures. Instead of passing rgba objects, it accepts "#rrggbbaa" hex strings for convenience.
- * To set gradients, provide an object with keys from 0 to 1 for the position of each color.
- * (ex: {0: new $.oColorValue("000000ff"), 1:new $.oColorValue("ffffffff")}).
- * @name $.oColor#value
- * @type {$.oColorValue}
- */
-Object.defineProperty($.oColor.prototype, 'value', {
- get : function(){
- var _color = this.colorObject;
-
- switch(this.type){
- case "solid":
- return new this.$.oColorValue(_color.colorData);
- case "texture":
- return this.palette.path.parent.path + this.palette.name+"_textures/" + this.id + ".tga";
- case "gradient":
- case "radial gradient":
- var _gradientArray = _color.colorData;
- var _value = {};
- for (var i in _gradientArray){
- var _data = _gradientArray[i];
- _value[_gradientArray[i].t] = new this.$.oColorValue(_data.r, _data.g, _data.b, _data.a);
- }
- return _value;
- default:
- }
- },
-
- set : function(newValue){
- var _color = this.colorObject;
-
- switch(this.type){
- case "solid":
- _value = new $.oColorValue(newValue);
- _color.setColorData(_value);
- break;
- case "texture":
- // TODO: need to copy the file into the folder first?
- _color.setTextureFile(newValue);
- break;
- case "gradient":
- case "radial gradient":
- var _value = [];
- var _gradient = newValue;
- for (var i in _gradient){
- var _color = _gradient[i];
- var _tack = {r:_color.r, g:_color.g, b:_color.b, a:_color.a, t:parseFloat(i, 10)}
- _value.push(_tack);
- }
- _color.setColorData(_value);
- break;
- default:
- };
- }
-});
-
-
-// Methods
-
-/**
- * Moves the color to another palette object (CFNote: perhaps have it push to paletteObject, instead of being done at the color level)
- * @param {$.oPalette} oPaletteObject The paletteObject to move this color into.
- * @param {int} index Need clarification from mchap
- *
- * @return: {$.oColor} The new resulting $.oColor object.
- */
-$.oColor.prototype.moveToPalette = function (oPaletteObject, index){
- if (typeof index === 'undefined') var index = oPaletteObject.paletteObject.nColors;
- var _duplicate = this.copyToPalette(oPaletteObject, index)
- this.remove()
-
- return _duplicate;
-}
-
-
-/**
- * Copies the color to another palette object (CFNote: perhaps have it push to paletteObject, instead of being done at the color level)
- * @param {$.oPalette} oPaletteObject The paletteObject to move this color into.
- * @param {int} index Need clarification from mchap
- *
- * @return: {$.oColor} The new resulting $.oColor object.
- */
-$.oColor.prototype.copyToPalette = function (oPaletteObject, index){
- var _color = this.colorObject;
-
- oPaletteObject.paletteObject.cloneColor(_color);
- var _colors = oPaletteObject.colors;
- var _duplicate = _colors.pop();
-
- if (typeof index !== 'undefined') _duplicate.index = index;
-
- return _duplicate;
-}
-
-
-/**
- * Removes the color from the palette it belongs to.
- */
-$.oColor.prototype.remove = function (){
- // TODO: find a way to work with index as more than one color can have the same id
- this.palette.paletteObject.removeColor(this.id);
-}
-
-
-/**
- * Static helper function to convert from {r:int, g:int, b:int, a:int} to a hex string in format #FFFFFFFF
- * Consider moving this to a helper function.
- * @param { obj } rgbaObject RGB object
- * @static
- * @return: { string } Hex color string in format #FFFFFFFF.
- */
-$.oColor.prototype.rgbaToHex = function (rgbaObject){
- var _hex = "#";
-  _hex += ("00"+rgbaObject.r.toString(16)).slice(-2)
-  _hex += ("00"+rgbaObject.g.toString(16)).slice(-2)
-  _hex += ("00"+rgbaObject.b.toString(16)).slice(-2)
-  _hex += ("00"+rgbaObject.a.toString(16)).slice(-2)
-
- return _hex;
-}
-
-
-/**
- * Static helper function to convert from hex string in format #FFFFFFFF to {r:int, g:int, b:int, a:int}
- * Consider moving this to a helper function.
- * @param { string } hexString RGB object
- * @static
- * @return: { obj } The hex object returned { r:int, g:int, b:int, a:int }
- */
-$.oColor.prototype.hexToRgba = function (hexString){
- var _rgba = {};
- //Needs a better fail state.
-
- _rgba.r = parseInt(hexString.slice(1,3), 16)
- _rgba.g = parseInt(hexString.slice(3,5), 16)
- _rgba.b = parseInt(hexString.slice(5,7), 16)
- _rgba.a = parseInt(hexString.slice(7,9), 16)
-
- return _rgba;
-}
-
-
-
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_column.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_column.js
deleted file mode 100644
index f73309049e..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_column.js
+++ /dev/null
@@ -1,649 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API which must remains available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oColumn class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oColumn class.
- * @classdesc Columns are the objects that hold all the animation information of an attribute. Any animated value in Harmony is driven by a column linked to the attribute that represents the node parameter. Columns can be added from the scene class, or are created directly when an attribute is given a value at an explicit frame.
- * @constructor
- * @param {string} uniqueName The unique name of the column.
- * @param {$.oAttribute} oAttributeObject The oAttribute thats connected to the column.
- *
- * @property {string} uniqueName The unique name of the column.
- * @property {$.oAttribute} attributeObject The attribute object that the column is attached to.
- * @example
- * // You can get the entirety of the columns in the scene by calling:
- * var doc = $.scn;
- * var allColumns = doc.columns;
- *
- * // However, to get a specific column, you can retrieve it from its linked attribute:
- *
- * var myAttribute = doc.nodes[0].attributes.position.x
- * var myColumn = myAttribute.column;
- *
- * // once you have the column, you can do things like remove duplicates keys to simplify an animation;
- * myColumn.removeDuplicateKeys();
- *
- * // you can extract all the keys to be able to iterate over it:
- * var keyFrames = myColumn.getKeyFrames();
- *
- * for (var i in keyFrames){
- * $.log (keyFrames[i].frameNumber);
- * }
- *
- * // you can also link a given column to more than one attribute so they share the same animated values:
- *
- * doc.nodes[0].attributes.position.y.column = myColumn; // now position.x and position.y will share the same animation on the node.
- */
-$.oColumn = function( uniqueName, oAttributeObject ){
- var instance = this.$.getInstanceFromCache.call(this, uniqueName);
- if (instance) return instance;
-
- this._type = "column";
-
- this.uniqueName = uniqueName;
- this.attributeObject = oAttributeObject;
-
- this._cacheFrames = [];
-
- //Helper cache for subsequent actions.
- try{
- // fails when the column has no attribute
- if( !this.$.cache_columnToNodeAttribute ){ this.$.cache_columnToNodeAttribute = {}; }
- this.$.cache_columnToNodeAttribute[this.uniqueName] = { "node":oAttributeObject.node, "attribute": this.attributeObject, "date": (new Date()).getTime() };
- }catch(err){}
-}
-
-
-// $.oColumn Object Properties
-/**
- * The name of the column.
- * @name $.oColumn#name
- * @type {string}
- */
-Object.defineProperty( $.oColumn.prototype, 'name', {
- get : function(){
- return column.getDisplayName(this.uniqueName);
- },
-
- set : function(newName){
- var _success = column.rename(this.uniqueName, newName)
- if (_success){
- this.uniqueName = newName;
- }else{
- throw new Error("Failed to rename column "+this.uniqueName+" to "+newName+".")
- }
- }
-});
-
-
-/**
- * The type of the column. There are nine column types: drawing (DRAWING), sound (SOUND), 3D Path (3DPATH), Bezier Curve (BEZIER), Ease Curve (EASE), Expression (EXPR), Timing (TIMING) for timing columns, Quaternion path (QUATERNIONPATH) for 3D rotation and Annotation (ANNOTATION) for annotation columns.
- * @name $.oColumn#type
- * @readonly
- * @type {string}
- */
-Object.defineProperty( $.oColumn.prototype, 'type', {
- get : function(){
- return column.type(this.uniqueName)
- }
-});
-
-
-/**
- * Whether the column is selected.
- * @name $.oColumn#selected
- * @type {bool}
- */
-Object.defineProperty($.oColumn.prototype, 'selected', {
- get : function(){
- var sel_num = selection.numberOfColumnsSelected();
- for( var n=0;n
- *
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} iconFile The icon file for the button
- * @param {string} text A text to display next to the icon
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- *
- */
- $.oPieButton = function(iconFile, text, parent) {
- // if icon isnt provided
- if (typeof parent === 'undefined') var parent = $.app.mainWindow
- if (typeof text === 'undefined') var text = ""
- if (typeof iconFile === 'undefined') var iconFile = specialFolders.resource+"/icons/script/qtgeneric.svg"
-
- QPushButton.call(this, text, parent);
-
- this.minimumHeight = 24;
- this.minimumWidth = 24;
-
- // set during addition to the pie Menu
- this.pieIndex = undefined;
-
- UiLoader.setSvgIcon(this, iconFile)
- this.setIconSize(new QSize(this.minimumWidth, this.minimumHeight));
- this.cursor = new QCursor(Qt.PointingHandCursor);
-
- var styleSheet = "QPushButton{ background-color: rgba(0, 0, 0, 1%); }" +
- "QPushButton:hover{ background-color: rgba(0, 200, 255, 80%); }"+
- "QToolTip{ background-color: rgba(0, 255, 255, 100%); }"
- this.setStyleSheet(styleSheet);
-
- var button = this;
- this.clicked.connect(function(){button.activate()})
-}
-$.oPieButton.prototype = Object.create(QPushButton.prototype);
-
-
-/**
- * Closes the parent menu of the button and all its subWidgets.
- */
-$.oPieButton.prototype.closeMenu = function(){
- var menu = this.parentMenu;
- while (menu && menu.parentMenu){
- menu = menu.parentMenu;
- }
- menu.closeMenu()
-}
-
-/**
- * Reimplement this function in order to activate the button and also close the menu.
- */
-$.oPieButton.prototype.activate = function(){
- // reimplement to change the behavior when the button is activated.
- // by default, will just close the menu.
- this.closeMenu();
-}
-
-
-/**
- * sets a parent and assigns it to this.parentMenu.
- * using the normal setParent from QPushButton creates a weird bug
- * where calling parent() returns a QWidget and not a $.oPieButton
- * @private
- */
-$.oPieButton.prototype.setParent = function(parent){
- QPushButton.prototype.setParent.call(this, parent);
- this.parentMenu = parent;
-}
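
// Sketch of a custom button following the subclassing pattern used by the classes below
// ($.oHelloButton is a hypothetical example; reimplementing activate() defines its behaviour):
$.oHelloButton = function(text, parent){
  var _icon = specialFolders.resource + "/icons/script/qtgeneric.svg"; // generic script icon
  $.oPieButton.call(this, _icon, text, parent);
  this.toolTip = "Writes a greeting to the message log";
}
$.oHelloButton.prototype = Object.create($.oPieButton.prototype);

$.oHelloButton.prototype.activate = function(){
  $.log("Hello from a custom pie button");
  this.closeMenu(); // closes the pie menu this button belongs to
}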
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oToolButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for $.oToolButton
- * @name $.oToolButton
- * @constructor
- * @classdescription This subclass of QPushButton provides an easy way to create a button for a tool.
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} toolName The name of the tool that will be selected when the button is activated
- * @param {string} iconFile An optional icon path for the button
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- *
- */
- $.oToolButton = function(toolName, iconFile, parent) {
- this.toolName = toolName;
-
- if (typeof iconFile === "undefined"){
- // find an icon for the function in the script-icons folder
- var scriptIconsFolder = new this.$.oFolder(specialFolders.resource+"/icons/drawingtool");
- var iconFiles = scriptIconsFolder.getFiles(toolName.replace(" ", "").toLowerCase() + ".*");
-
- if (iconFiles.length > 0){
- var iconFile = iconFiles[0].path;
- }else{
- // choose default toonboom "missing icon" script icon
- // currently svg icons seem unsupported?
- var iconFile = specialFolders.resource+"/icons/script/qtgeneric.svg";
- }
- }
- this.$.oPieButton.call(this, iconFile, "", parent);
-
- this.toolTip = this.toolName;
-}
-$.oToolButton.prototype = Object.create($.oPieButton.prototype);
-
-
-$.oToolButton.prototype.activate = function(){
- this.$.app.currentTool = this.toolName;
- this.closeMenu()
-}
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oActionButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for $.oActionButton
- * @name $.oActionButton
- * @constructor
- * @classdescription This subclass of QPushButton provides an easy way to create a button that performs an action.
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} actionName The action string that will be executed with Action.perform
- * @param {string} responder The responder for the action
- * @param {string} text A text for the button display.
- * @param {string} iconFile An icon path for the button.
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- */
- $.oActionButton = function(actionName, responder, text, iconFile, parent) {
- this.action = actionName;
- this.responder = responder;
-
- if (typeof text === 'undefined') var text = "action";
-
- if (typeof iconFile === 'undefined') var iconFile = specialFolders.resource+"/icons/old/exec.png";
-
- this.$.oPieButton.call(this, iconFile, text, parent);
- this.toolTip = text;
-}
-$.oActionButton.prototype = Object.create($.oPieButton.prototype);
-
-
-$.oActionButton.prototype.activate = function(){
- if (this.responder){
- // log("Validating : "+ this.actionName + " ? "+ Action.validate(this.actionName, this.responder).enabled)
- if (Action.validate(this.action, this.responder).enabled){
- Action.perform(this.action, this.responder);
- }
- }else{
- if (Action.validate(this.action).enabled){
- Action.perform(this.action);
- }
- }
- view.refreshViews();
- this.closeMenu()
-}
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oColorButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for $.oColorButton
- * @name $.oColorButton
- * @constructor
- * @classdescription This subclass of QPushButton provides an easy way to create a button to choose a color from a palette.
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} paletteName The name of the palette that contains the color
- * @param {string} colorName The name of the color (if more than one is present, will pick the first match)
- * @param {bool} showName Whether to display the name of the color on the button
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- *
- */
- $.oColorButton = function(paletteName, colorName, showName, parent) {
- this.paletteName = paletteName;
- this.colorName = colorName;
-
- if (typeof showName === "undefined") var showName = false;
-
- this.$.oPieButton.call(this, "", showName?colorName:"", parent);
-
- var palette = this.$.scn.getPaletteByName(paletteName);
- var color = palette.getColorByName(colorName);
- var colorValue = color.value
-
- var iconMap = new QPixmap(this.minimumHeight,this.minimumHeight)
- iconMap.fill(new QColor(colorValue.r, colorValue.g, colorValue.b, colorValue.a))
- var icon = new QIcon(iconMap);
-
- this.icon = icon;
-
- this.toolTip = this.paletteName + ": " + this.colorName;
-}
-$.oColorButton.prototype = Object.create($.oPieButton.prototype);
-
-
-$.oColorButton.prototype.activate = function(){
- var palette = this.$.scn.getPaletteByName(this.paletteName);
- var color = palette.getColorByName(this.colorName);
-
- this.$.scn.currentPalette = palette;
- palette.currentColor = color;
- this.closeMenu()
-}
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oScriptButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for $.oScriptButton
- * @name $.oScriptButton
- * @constructor
- * @classdescription This subclass of QPushButton provides an easy way to create a button for a widget that will launch a function from another script file.
- * The buttons created this way automatically load the icon named after the script if it finds one named like the function in a script-icons folder next to the script file.
- * It will also automatically set the callback to launch the function from the script.
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} scriptFile The path to the script file that will be launched
- * @param {string} scriptFunction The function name to launch from the script
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- */
-$.oScriptButton = function(scriptFile, scriptFunction, parent) {
- this.scriptFile = scriptFile;
- this.scriptFunction = scriptFunction;
-
- // find an icon for the function in the script-icons folder
- var scriptFile = new this.$.oFile(scriptFile)
- var scriptIconsFolder = new this.$.oFolder(scriptFile.folder.path+"/script-icons");
- var iconFiles = scriptIconsFolder.getFiles(scriptFunction+".*");
- if (iconFiles.length > 0){
- var iconFile = iconFiles[0].path;
- }else{
- // choose default toonboom "missing icon" script icon
- // currently svg icons seem unsupported?
- var iconFile = specialFolders.resource+"/icons/script/qtgeneric.svg";
- }
-
- this.$.oPieButton.call(this, iconFile, "", parent);
-
- this.toolTip = this.scriptFunction;
-}
-$.oScriptButton.prototype = Object.create($.oPieButton.prototype);
-
-$.oScriptButton.prototype.activate = function(){
- include(this.scriptFile);
- eval(this.scriptFunction)();
- this.closeMenu()
-}
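
// Usage sketch (hypothetical script path and function name); the constructor also looks for
// an icon named after the function in a "script-icons" folder next to the script file:
var _scriptButton = new $.oScriptButton("/path/to/myTools.js", "myFunction");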
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oPrefButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for $.oPrefButton
- * @name $.oPrefButton
- * @constructor
- * @classdescription This subclass of QPushButton provides an easy way to create a button to change a boolean preference.
- * This class is a subclass of QPushButton and all the methods from that class are available to modify this button.
- * @param {string} preferenceString The name of the preference to show/change.
- * @param {string} text A text for the button display.
- * @param {string} iconFile An icon path for the button.
- * @param {QWidget} parent The parent QWidget for the button. Automatically set during initialisation of the menu.
- */
-$.oPrefButton = function(preferenceString, text, iconFile, parent) {
- this.preferenceString = preferenceString;
-
- if (typeof iconFile === 'undefined') var iconFile = specialFolders.resource+"/icons/toolproperties/settings.svg";
- this.checkable = true;
- this.checked = preferences.getBool(preferenceString, true);
-
- $.oPieButton.call(this, iconFile, text, parent);
-
- this.toolTip = this.preferenceString;
-}
-$.oPrefButton.prototype = Object.create($.oPieButton.prototype);
-
-
-$.oPrefButton.prototype.activate = function(){
- var value = !preferences.getBool(this.preferenceString, true);
- this.checked = value;
- preferences.setBool(this.preferenceString, value);
- this.closeMenu()
-}
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oStencilButton class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-// not currently working
-$.oStencilButton = function(stencilName, parent) {
- this.stencilName = stencilName;
-
- var iconFile = specialFolders.resource+"/icons/brushpreset/default.svg";
-
- $.oPieButton.call(this, iconFile, stencilName, parent);
-
- this.toolTip = stencilName;
-}
-$.oStencilButton.prototype = Object.create($.oPieButton.prototype);
-
-$.oStencilButton.prototype.activate = function(){
- this.$.app.currentStencil = this.stencilName;
-
- this.closeMenu()
-}
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_drawing.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_drawing.js
deleted file mode 100644
index 6f2bc19c0c..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_drawing.js
+++ /dev/null
@@ -1,2181 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API which must remains available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oDrawing class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The $.oDrawing constructor.
- * @constructor
- * @classdesc The $.oDrawing Class represents a single drawing from an element.
- * @param {int} name The name of the drawing.
- * @param {$.oElement} oElementObject The element object associated to the element.
- *
- * @property {int} name The name of the drawing.
- * @property {$.oElement} element The element object associated to the element.
- */
-$.oDrawing = function (name, oElementObject) {
- this._type = "drawing";
- this._name = name;
- this.element = oElementObject;
-
- this._key = Drawing.Key({
- elementId: oElementObject.id,
- exposure: name
- });
-
- //log(JSON.stringify(this._key))
-
- this._overlay = new this.$.oArtLayer(3, this);
- this._lineArt = new this.$.oArtLayer(2, this);
- this._colorArt = new this.$.oArtLayer(1, this);
- this._underlay = new this.$.oArtLayer(0, this);
- this._artLayers = [this._underlay, this._colorArt, this._lineArt, this._overlay];
-}
-
-
-/**
- * The different types of line ends.
- * @name $.oDrawing#LINE_END_TYPE
- * @enum
- */
-$.oDrawing.LINE_END_TYPE = {
- ROUND: 1,
- FLAT: 2,
- BEVEL: 3
-};
-
-
-/**
- * The reference to the art layers to use with oDrawing.setAsActiveDrawing()
- * @name $.oDrawing#ART_LAYER
- * @enum
- */
-$.oDrawing.ART_LAYER = {
- OVERLAY: 8,
- LINEART: 4,
- COLORART: 2,
- UNDERLAY: 1
-};
-
-
-/**
- * The name of the drawing.
- * @name $.oDrawing#name
- * @type {string}
- */
-Object.defineProperty($.oDrawing.prototype, 'name', {
- get: function () {
- return this._name;
- },
-
- set: function (newName) {
- if (this._name == newName) return;
-
- var _column = this.element.column.uniqueName;
- // this ripples recursively
-
- if (Drawing.isExists(this.element.id, newName)) this.element.getDrawingByName(newName).name = newName + "_1";
- column.renameDrawing(_column, this._name, newName);
- this._name = newName;
- }
-})
-
-
-/**
- * The internal Id used to identify drawings.
- * @name $.oDrawing#id
- * @readonly
- * @type {int}
- */
-Object.defineProperty($.oDrawing.prototype, 'id', {
- get: function () {
- return this._key.drawingId;
- }
-})
-
-
-/**
- * The folder path of the drawing on the filesystem.
- * @name $.oDrawing#path
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oDrawing.prototype, 'path', {
- get: function () {
- return fileMapper.toNativePath(Drawing.filename(this.element.id, this.name))
- }
-})
-
-
-/**
- * The drawing pivot of the drawing.
- * @name $.oDrawing#pivot
- * @type {$.oPoint}
- */
-Object.defineProperty($.oDrawing.prototype, 'pivot', {
- get: function () {
- if (this.$.batchMode){
- throw new Error("oDrawing.pivot is not available in batch mode.")
- }
-
- var _pivot = Drawing.getPivot({ "drawing": this._key });
- return new this.$.oPoint(_pivot.x, _pivot.y, 0);
- },
-
- set: function (newPivot) {
- var _pivot = { x: newPivot.x, y: newPivot.y };
- Drawing.setPivot({ drawing: this._key, pivot: _pivot });
- }
-})
-
-
-/**
- * The color Ids present on the drawing.
- * @name $.oDrawing#usedColorIds
- * @type {string[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'usedColorIds', {
- get: function () {
- var _colorIds = DrawingTools.getDrawingUsedColors(this._key);
- return _colorIds;
- }
-})
-
-
-/**
- * The bounding box of the drawing, in drawing space coordinates. (null if the drawing is empty.)
- * @name $.oDrawing#boundingBox
- * @readonly
- * @type {$.oBox}
- */
-Object.defineProperty($.oDrawing.prototype, 'boundingBox', {
- get: function () {
- if (this.$.batchMode){
- throw new Error("oDrawing.boudingBox is not available in batch mode.")
- }
-
- var _box = new this.$.oBox()
- for (var i in this.artLayers) {
- var _layerBox = this.artLayers[i].boundingBox
- if (_layerBox) _box.include(_layerBox)
- }
-
- return _box
- }
-})
-
-
-/**
- * Access the underlay art layer's content through this object.
- * @name $.oDrawing#underlay
- * @readonly
- * @type {$.oArtLayer}
- */
-Object.defineProperty($.oDrawing.prototype, 'underlay', {
- get: function () {
- return this._underlay;
- }
-})
-
-
-/**
- * Access the color art layer's content through this object.
- * @name $.oDrawing#colorArt
- * @readonly
- * @type {$.oArtLayer}
- */
-Object.defineProperty($.oDrawing.prototype, 'colorArt', {
- get: function () {
- return this._colorArt;
- }
-})
-
-
-/**
- * Access the line art layer's content through this object.
- * @name $.oDrawing#lineArt
- * @readonly
- * @type {$.oArtLayer}
- */
-Object.defineProperty($.oDrawing.prototype, 'lineArt', {
- get: function () {
- return this._lineArt;
- }
-})
-
-
-/**
- * Access the overlay art layer's content through this object.
- * @name $.oDrawing#overlay
- * @readonly
- * @type {$.oArtLayer}
- */
-Object.defineProperty($.oDrawing.prototype, 'overlay', {
- get: function () {
- return this._overlay;
- }
-})
-
-
-/**
- * The list of artLayers of this drawing.
- * @name $.oDrawing#artLayers
- * @readonly
- * @type {$.oArtLayer[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'artLayers', {
- get: function () {
- return this._artLayers;
- }
-})
-
-
-
-/**
- * the shapes contained amongst all artLayers of this drawing.
- * @name $.oDrawing#shapes
- * @readonly
- * @type {$.oShape[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'shapes', {
- get: function () {
- var _shapes = [];
- for (var i in this.artLayers) {
- _shapes = _shapes.concat(this.artLayers[i].shapes);
- }
-
- return _shapes;
- }
-})
-
-
-/**
- * the strokes contained amongst all artLayers of this drawing.
- * @name $.oDrawing#strokes
- * @readonly
- * @type {$.oStroke[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'strokes', {
- get: function () {
- var _strokes = [];
- for (var i in this.artLayers) {
- _strokes = _strokes.concat(this.artLayers[i].strokes);
- }
-
- return _strokes;
- }
-})
-
-
-/**
- * The contours contained amongst all the shapes of the artLayer.
- * @name $.oDrawing#contours
- * @type {$.oContour[]}
- */
- Object.defineProperty($.oDrawing.prototype, 'contours', {
- get: function () {
- var _contours = []
-
- for (var i in this.artLayers) {
- _contours = _contours.concat(this.artLayers[i].contours)
- }
-
- return _contours
- }
-})
-
-
-
-/**
- * the currently active art layer of this drawing.
- * @name $.oDrawing#activeArtLayer
- * @type {$.oArtLayer}
- */
-Object.defineProperty($.oDrawing.prototype, 'activeArtLayer', {
- get: function () {
- var settings = Tools.getToolSettings();
- if (!settings.currentDrawing) return null;
-
- return this.artLayers[settings.activeArt]
- },
- set: function (newArtLayer) {
- var layers = this.$.oDrawing.ART_LAYER
- var index = layers[newArtLayer.name.toUpperCase()]
- this.setAsActiveDrawing(index);
- }
-})
-
-
-/**
- * the selected shapes on this drawing
- * @name $.oDrawing#selectedShapes
- * @type {$.oShape[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'selectedShapes', {
- get: function () {
- var _selectedShapes = [];
- for (var i in this.artLayers) {
- _selectedShapes = _selectedShapes.concat(this.artLayers[i].selectedShapes);
- }
-
- return _selectedShapes;
- }
-})
-
-
-/**
- * the selected strokes on this drawing
- * @name $.oDrawing#selectedStrokes
- * @type {$.oStroke[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'selectedStrokes', {
- get: function () {
- var _selectedStrokes = [];
- for (var i in this.artLayers) {
- _selectedStrokes = _selectedStrokes.concat(this.artLayers[i].selectedStrokes);
- }
-
- return _selectedStrokes;
- }
-})
-
-
-/**
- * the selected contours on this drawing
- * @name $.oDrawing#selectedContours
- * @type {$.oContour[]}
- */
-Object.defineProperty($.oDrawing.prototype, 'selectedContours', {
- get: function () {
- var _selectedContours = [];
- for (var i in this.artLayers) {
- _selectedContours = _selectedContours.concat(this.artLayers[i].selectedContours);
- }
-
- return _selectedContours;
- }
-})
-
-
-/**
- * all the data from this drawing. For internal use.
- * @name $.oDrawing#drawingData
- * @type {Object}
- * @readonly
- * @private
- */
-Object.defineProperty($.oDrawing.prototype, 'drawingData', {
- get: function () {
- var _data = Drawing.query.getData({drawing: this._key});
- if (!_data) throw new Error("Data unavailable for drawing "+this.name)
- return _data;
- }
-})
-
-
-
-
-// $.oDrawing Class methods
-
-/**
- * Import a given file into an existing drawing.
- * @param {$.oFile} file The path to the file
- * @param {bool} [convertToTvg=false] Whether to convert the bitmap to the tvg format (this doesn't vectorise the drawing)
- *
- * @return { $.oFile } the oFile object pointing to the drawing file after being it has been imported into the element folder.
- */
-$.oDrawing.prototype.importBitmap = function (file, convertToTvg) {
- var _path = new this.$.oFile(this.path);
- if (!(file instanceof this.$.oFile)) file = new this.$.oFile(file);
- if (!file.exists) throw new Error ("Can't import bitmap "+file.path+", file doesn't exist");
-
- if (convertToTvg && file.extension.toLowerCase() != "tvg"){
- // use utransform binary to perform conversion
- var _bin = specialFolders.bin + "/utransform";
-
- var tempFolder = this.$.scn.tempFolder;
-
- var _convertedFilePath = tempFolder.path + "/" + file.name + ".tvg";
- var _convertProcess = new this.$.oProcess(_bin, ["-outformat", "TVG", "-debug", "-scale", "1", "-bboxtvgincrease","0" , "-outfile", _convertedFilePath, file.path]);
- log(_convertProcess.execute())
-
- var convertedFile = new this.$.oFile(_convertedFilePath);
- if (!convertedFile.exists) throw new Error ("Converting " + file.path + " to TVG has failed.");
-
- file = convertedFile;
- }
-
- return file.copy(_path.folder, _path.name, true);
-}
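-
-/* Illustrative sketch: assuming `myDrawing` is an existing $.oDrawing, an external bitmap
-   can be imported as its content and converted to TVG in the same call:
-
-     var _imported = myDrawing.importBitmap("C:/temp/background.png", true);
-     // _imported is the $.oFile now located inside the element folder
-*/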
-
-
-/**
- * @returns {int[]} The frame numbers at which this drawing appears.
- */
-$.oDrawing.prototype.getVisibleFrames = function () {
- var _element = this.element;
- var _column = _element.column;
-
- if (!_column) {
- this.$.debug("Column missing: can't get visible frames for drawing " + this.name + " of element " + _element.name, this.$.DEBUG_LEVEL.ERROR);
- return null;
- }
-
- var _frames = [];
- var _keys = _column.keyframes;
- for (var i in _keys) {
- if (_keys[i].value == this.name) _frames.push(_keys[i].frameNumber);
- }
-
- return _frames;
-}
-
-
-/**
- * Remove the drawing from the element.
- */
-$.oDrawing.prototype.remove = function () {
- var _element = this.element;
- var _column = _element.column;
-
- if (!_column) {
- throw new Error ("Column missing: impossible to delete drawing " + this.name + " of element " + _element.name);
- }
-
- var _frames = _column.frames;
- var _lastFrame = _frames.pop();
-
- var _thisDrawing = this;
-
- // we have to expose the drawing on the column to delete it. Exposing at the last frame...
- this.$.debug("deleting drawing " + _thisDrawing + " from element " + _element.name, this.$.DEBUG_LEVEL.LOG);
- var _lastDrawing = _lastFrame.value;
- var _keyFrame = _lastFrame.isKeyFrame;
- _lastFrame.value = _thisDrawing;
-
- column.deleteDrawingAt(_column.uniqueName, _lastFrame.frameNumber);
-
- // resetting the last frame
- _lastFrame.value = _lastDrawing;
- _lastFrame.isKeyFrame = _keyFrame;
-}
-
-
-
-/**
- * refresh the preview of the drawing.
- */
-$.oDrawing.prototype.refreshPreview = function () {
- if (this.element.format == "TVG") return;
-
- var _path = new this.$.oFile(this.path);
- var _elementFolder = _path.folder;
- var _previewFiles = _elementFolder.getFiles(_path.name + "-*.tga");
-
- for (var i in _previewFiles) {
- _previewFiles[i].remove();
- }
-}
-
-
-/**
-* Change the currently active drawing. Can specify an art Layer
-* Doesn't work in batch mode.
-* @param {oDrawing.ART_LAYER} [artLayer] activate the given art layer
-* @return {bool} success of setting the drawing as current
-*/
-$.oDrawing.prototype.setAsActiveDrawing = function (artLayer) {
- if (this.$.batchMode) {
- this.$.debug("Setting as active drawing not available in batch mode", this.$.DEBUG_LEVEL.ERROR);
- return false;
- }
-
- var _column = this.element.column;
- if (!_column) {
- this.$.debug("Column missing: impossible to set as active drawing " + this.name + " of element " + _element.name, this.$.DEBUG_LEVEL.ERROR);
- return false;
- }
-
- var _frame = this.getVisibleFrames();
- if (_frame.length == 0) {
- this.$.debug("Drawing not exposed: impossible to set as active drawing " + this.name + " of element " + _element.name, this.$.DEBUG_LEVEL.ERROR);
- return false;
- }
-
- DrawingTools.setCurrentDrawingFromColumnName(_column.uniqueName, _frame[0]);
-
- if (artLayer) DrawingTools.setCurrentArt(artLayer);
-
- return true;
-}
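-
-/* Illustrative sketch: assuming `myDrawing` is an exposed $.oDrawing, it can be made the
-   current drawing on its line art layer before running selection-based operations
-   (interactive sessions only, the call returns false in batch mode):
-
-     var _ok = myDrawing.setAsActiveDrawing($.oDrawing.ART_LAYER.LINEART);
-*/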
-
-
-/**
- * Duplicates the drawing to the given frame, and renames the drawing with the given name.
- * @param {int} [frame] the frame at which to create the drawing. By default, the current frame.
- * @param {string} [newName] A new name for the drawing. By default, the name will be the number of the frame.
- * @returns {$.oDrawing} the newly created drawing
- */
-$.oDrawing.prototype.duplicate = function(frame, newName){
- var _element = this.element
- if (typeof frame ==='undefined') var frame = this.$.scn.currentFrame;
- if (typeof newName === 'undefined') var newName = frame;
- var newDrawing = _element.addDrawing(frame, newName, this.path)
- return newDrawing;
-}
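-
-/* Illustrative sketch: duplicating an assumed $.oDrawing onto the current frame under a
-   derived name:
-
-     var _copy = myDrawing.duplicate(undefined, myDrawing.name + "_copy");
-*/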
-
-/**
- * Replaces a color Id present on the drawing by another.
- * @param {string} currentId
- * @param {string} newId
- */
-$.oDrawing.prototype.replaceColorId = function (currentId, newId){
- DrawingTools.recolorDrawing( this._key, [{from:currentId, to:newId}]);
-}
-
-
-/**
- * Copies the contents of the Drawing into the clipboard
- * @param {oDrawing.ART_LAYER} [artLayer] Specify to only copy the contents of the specified artLayer
- */
-$.oDrawing.prototype.copyContents = function (artLayer) {
-
- var _current = this.setAsActiveDrawing(artLayer);
- if (!_current) {
- this.$.debug("Impossible to copy contents of drawing " + this.name + " of element " + _element.name + ", the drawing cannot be set as active.", this.DEBUG_LEVEL.ERROR);
- return;
- }
- ToolProperties.setApplyAllArts(!artLayer);
- Action.perform("deselect()", "cameraView");
- Action.perform("onActionChooseSelectTool()");
- Action.perform("selectAll()", "cameraView");
-
- if (Action.validate("copy()", "cameraView").enabled) Action.perform("copy()", "cameraView");
-}
-
-
-/**
- * Pastes the contents of the clipboard into the Drawing
- * @param {oDrawing.ART_LAYER} [artLayer] Specify to only paste the contents onto the specified artLayer
- */
-$.oDrawing.prototype.pasteContents = function (artLayer) {
-
- var _current = this.setAsActiveDrawing(artLayer);
- if (!_current) {
- this.$.debug("Impossible to copy contents of drawing " + this.name + " of element " + _element.name + ", the drawing cannot be set as active.", this.DEBUG_LEVEL.ERROR);
- return;
- }
- ToolProperties.setApplyAllArts(!artLayer);
- Action.perform("deselect()", "cameraView");
- Action.perform("onActionChooseSelectTool()");
- if (Action.validate("paste()", "cameraView").enabled) Action.perform("paste()", "cameraView");
-}
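-
-/* Illustrative sketch: assuming `sourceDrawing` and `targetDrawing` are existing, exposed
-   $.oDrawing objects, the line art of one can be transferred to the other through the
-   clipboard (interactive sessions only):
-
-     sourceDrawing.copyContents($.oDrawing.ART_LAYER.LINEART);
-     targetDrawing.pasteContents($.oDrawing.ART_LAYER.LINEART);
-*/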
-
-
-/**
-* Converts the line ends of the Drawing object to the defined type.
-* Doesn't work in batch mode. This function modifies the selection.
-*
-* @param {oDrawing.LINE_END_TYPE} endType the type of line ends to set.
-* @param {oDrawing.ART_LAYER} [artLayer] only apply to provided art Layer.
-*/
-$.oDrawing.prototype.setLineEnds = function (endType, artLayer) {
- if (this.$.batchMode) {
- this.$.debug("setting line ends not available in batch mode", this.DEBUG_LEVEL.ERROR);
- return;
- }
-
- var _current = this.setAsActiveDrawing(artLayer);
- if (!_current) {
- this.$.debug("Impossible to change line ends on drawing " + this.name + " of element " + _element.name + ", the drawing cannot be set as active.", this.DEBUG_LEVEL.ERROR);
- return;
- }
-
- // apply to all arts only if art layer not specified
- ToolProperties.setApplyAllArts(!artLayer);
- Action.perform("deselect()", "cameraView");
- Action.perform("onActionChooseSelectTool()");
- Action.perform("selectAll()", "cameraView");
-
- var widget = $.getHarmonyUIWidget("pencilShape", "frameBrushParameters");
- if (widget) {
- widget.onChangeTipStart(endType);
- widget.onChangeTipEnd(endType);
- widget.onChangeJoin(endType);
- }
- Action.perform("deselect()", "cameraView");
-}
-
-
-/**
-* Converts the Drawing object to a string of the drawing name.
-* @return: { string } The name of the drawing.
-*/
-$.oDrawing.prototype.toString = function () {
- return this.name;
-}
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oArtLayer class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oArtLayer class.
- * @constructor
- * @classdesc $.oArtLayer represents art layers, as described by the artlayer toolbar. Access the drawing contents of the layers through this class.
- * @param {int} index The artLayerIndex (0: underlay, 1: line art, 2: color art, 3:overlay).
- * @param {$.oDrawing} oDrawingObject The oDrawing this layer belongs to.
- */
-$.oArtLayer = function (index, oDrawingObject) {
- this._layerIndex = index;
- this._drawing = oDrawingObject;
- //log(this._drawing._key)
- this._key = { "drawing": this._drawing._key, "art": index }
-}
-
-
-/**
- * The name of the artLayer (lineArt, colorArt, etc)
- * @name $.oArtLayer#name
- * @type {string}
- */
-Object.defineProperty($.oArtLayer.prototype, 'name', {
- get: function(){
- var names = ["underlay", "colorArt", "lineArt", "overlay"];
- return names[this._layerIndex];
- }
-})
-
-
-/**
- * The shapes contained on the artLayer.
- * @name $.oArtLayer#shapes
- * @type {$.oShape[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'shapes', {
- get: function () {
- if (!this.hasOwnProperty("_shapes")){
- var _shapesNum = Drawing.query.getNumberOfLayers(this._key);
- var _shapes = [];
- for (var i = 0; i < _shapesNum; i++) {
- _shapes.push(this.getShapeByIndex(i));
- }
- this._shapes = _shapes;
- }
- return this._shapes;
- }
-})
-
-
-/**
- * The strokes contained amongst all the shapes of the artLayer.
- * @name $.oArtLayer#strokes
- * @type {$.oStroke[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'strokes', {
- get: function () {
- var _strokes = [];
-
- var _shapes = this.shapes;
- for (var i in _shapes) {
- _strokes = _strokes.concat(_shapes[i].strokes);
- }
-
- return _strokes;
- }
-})
-
-
-/**
- * The contours contained amongst all the shapes of the artLayer.
- * @name $.oArtLayer#contours
- * @type {$.oContour[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'contours', {
- get: function () {
- var _contours = [];
-
- var _shapes = this.shapes;
- for (var i in _shapes) {
- _contours = _contours.concat(_shapes[i].contours);
- }
-
- return _contours;
- }
-})
-
-
-/**
- * The bounds of the layer, in drawing space coordinates. (null if the drawing is empty.)
- * @name $.oArtLayer#boundingBox
- * @type {$.oBox}
- */
-Object.defineProperty($.oArtLayer.prototype, 'boundingBox', {
- get: function () {
- var _box = Drawing.query.getBox(this._key);
- if (_box.empty) return null;
-
- var _boundingBox = new $.oBox(_box.x0, _box.y0, _box.x1, _box.y1);
- return _boundingBox;
- }
-})
-
-
-/**
- * the currently selected shapes on the ArtLayer.
- * @name $.oArtLayer#selectedShapes
- * @type {$.oShape[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'selectedShapes', {
- get: function () {
- var _shapes = Drawing.selection.get(this._key).selectedLayers;
- var _artLayer = this;
- return _shapes.map(function (x) { return _artLayer.getShapeByIndex(x) });
- }
-})
-
-
-
-/**
- * the currently selected strokes on the ArtLayer.
- * @name $.oArtLayer#selectedStrokes
- * @type {$.oStroke[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'selectedStrokes', {
- get: function () {
- var _shapes = this.selectedShapes;
- var _strokes = [];
-
- for (var i in _shapes) {
- _strokes = _strokes.concat(_shapes[i].strokes);
- }
-
- return _strokes;
- }
-})
-
-
-/**
- * the currently selected contours on the ArtLayer.
- * @name $.oArtLayer#selectedContours
- * @type {$.oContour[]}
- */
-Object.defineProperty($.oArtLayer.prototype, 'selectedContours', {
- get: function () {
- var _shapes = this.selectedShapes;
- var _contours = [];
-
- for (var i in _shapes) {
- _contours = _contours.concat(_shapes[i].contours);
- }
-
- return _contours;
- }
-})
-
-
-
-/**
- * all the data from this artLayer. For internal use.
- * @name $.oArtLayer#drawingData
- * @type {object}
- * @readonly
- * @private
- */
-Object.defineProperty($.oArtLayer.prototype, 'drawingData', {
- get: function () {
- var _data = this._drawing.drawingData
- for (var i in _data.arts){
- if (_data.arts[i].art == this._layerIndex) {
- return _data.arts[i];
- }
- }
-
- // in case of empty layerArt, return a default object
- return {art:this._layerIndex, artName:this.name, layers:[]};
- }
-})
-
-
-/**
- * Draws a circle on the artLayer.
- * @param {$.oPoint} center The center of the circle
- * @param {float} radius The radius of the circle
- * @param {$.oLineStyle} [lineStyle] Provide a $.oLineStyle object to specify how the line will look
- * @param {object} [fillStyle=null] The fill information to fill the circle with.
- * @returns {$.oShape} the created shape containing the circle.
-*/
-$.oArtLayer.prototype.drawCircle = function(center, radius, lineStyle, fillStyle){
- if (typeof fillStyle === 'undefined') var fillStyle = null;
-
- var arg = {
- x: center.x,
- y: center.y,
- radius: radius
- };
- var _path = Drawing.geometry.createCircle(arg);
-
- return this.drawShape(_path, lineStyle, fillStyle);
-}
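-
-/* Illustrative sketch: assuming `myDrawing` is an existing $.oDrawing, a circle can be
-   drawn on its line art layer with the current pencil settings and palette color (both
-   style constructors fall back to the current selection when called without arguments):
-
-     var _shape = myDrawing.lineArt.drawCircle(new $.oPoint(0, 0, 0), 50, new $.oLineStyle(), new $.oFillStyle());
-*/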
-
-/**
- * Draws the given path on the artLayer.
- * @param {$.oVertex[]} path an array of $.oVertex objects that describe a path.
- * @param {$.oLineStyle} [lineStyle] the line style to draw with. (By default, will use the current stencil selection)
- * @param {$.oFillStyle} [fillStyle] the fill information for the path. (By default, will use the current palette selection)
- * @param {bool} [polygon] Whether bezier handles should be created for the points in the path (ignores "onCurve" properties of oVertex from path)
- * @param {bool} [createUnderneath] Whether the new shape will appear on top or underneath the contents of the layer. (not working yet)
- */
-$.oArtLayer.prototype.drawShape = function(path, lineStyle, fillStyle, polygon, createUnderneath){
- if (typeof fillStyle === 'undefined') var fillStyle = new this.$.oFillStyle();
- if (typeof lineStyle === 'undefined') var lineStyle = new this.$.oLineStyle();
- if (typeof polygon === 'undefined') var polygon = false;
- if (typeof createUnderneath === 'undefined') var createUnderneath = false;
-
- var index = this.shapes.length;
-
- var _lineStyle = {};
-
- if (lineStyle){
- _lineStyle.pencilColorId = lineStyle.colorId;
- _lineStyle.thickness = {
- "minThickness": lineStyle.minThickness,
- "maxThickness": lineStyle.maxThickness,
- "thicknessPath": 0
- };
- }
-
- if (fillStyle) _lineStyle.shaderLeft = 0;
- if (polygon) _lineStyle.polygon = true;
- _lineStyle.under = createUnderneath;
- _lineStyle.stroke = !!lineStyle;
-
-  var strokeDescription = _lineStyle;
-  strokeDescription.path = path;
-  strokeDescription.closed = !!fillStyle;
-
-  var shapeDescription = {}
-  if (fillStyle) shapeDescription.shaders = [{ colorId : fillStyle.colorId }]
-  shapeDescription.strokes = [strokeDescription]
- if (lineStyle) shapeDescription.thicknessPaths = [lineStyle.stencil.thicknessPath]
-
- var config = {
- label: "draw shape",
- drawing: this._key.drawing,
- art: this._key.art,
- layers: [shapeDescription]
- };
-
-
- var layers = DrawingTools.createLayers(config);
-
- var newShape = this.getShapeByIndex(index);
- this._shapes.push(newShape);
- return newShape;
-};
-
-
-/**
- * Draws the given path on the artLayer.
- * @param {$.oVertex[]} path an array of $.oVertex objects that describe a path.
- * @param {$.oLineStyle} lineStyle the line style to draw with.
- * @returns {$.oShape} the shape containing the added stroke.
- */
-$.oArtLayer.prototype.drawStroke = function(path, lineStyle){
- return this.drawShape(path, lineStyle, null);
-};
-
-
-/**
- * Draws the given path on the artLayer as a contour.
- * @param {$.oVertex[]} path an array of $.oVertex objects that describe a path.
- * @param {$.oFillStyle} fillStyle the fill style to draw with.
- * @returns {$.oShape} the shape newly created from the path.
- */
-$.oArtLayer.prototype.drawContour = function(path, fillStyle){
- return this.drawShape(path, null, fillStyle);
-};
-
-
-/**
- * Draws a rectangle on the artLayer.
- * @param {float} x the x coordinate of the top left corner.
- * @param {float} y the y coordinate of the top left corner.
- * @param {float} width the width of the rectangle.
- * @param {float} height the height of the rectangle.
- * @param {$.oLineStyle} lineStyle a line style to use for the rectangle stroke.
- * @param {$.oFillStyle} fillStyle a fill style to use for the rectangle fill.
- * @returns {$.oShape} the shape containing the added stroke.
- */
-$.oArtLayer.prototype.drawRectangle = function(x, y, width, height, lineStyle, fillStyle){
- if (typeof fillStyle === 'undefined') var fillStyle = null;
-
- var path = [
- {x:x,y:y,onCurve:true},
- {x:x+width,y:y,onCurve:true},
- {x:x+width,y:y-height,onCurve:true},
- {x:x,y:y-height,onCurve:true},
- {x:x,y:y,onCurve:true}
- ];
-
- return this.drawShape(path, lineStyle, fillStyle);
-}
-
-
-
-/**
- * Draws a line on the artLayer
- * @param {$.oPoint} startPoint
- * @param {$.oPoint} endPoint
- * @param {$.oLineStyle} lineStyle
- * @returns {$.oShape} the shape containing the added line.
- */
-$.oArtLayer.prototype.drawLine = function(startPoint, endPoint, lineStyle){
- var path = [{x:startPoint.x,y:startPoint.y,onCurve:true},{x:endPoint.x,y:endPoint.y,onCurve:true}];
-
- return this.drawShape(path, lineStyle, null);
-}
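-
-/* Illustrative sketch: a plain stroke and a filled rectangle drawn on the color art layer
-   of an assumed $.oDrawing:
-
-     var _colorArt = myDrawing.colorArt;
-     _colorArt.drawLine(new $.oPoint(-100, 0, 0), new $.oPoint(100, 0, 0), new $.oLineStyle());
-     _colorArt.drawRectangle(-50, 50, 100, 100, new $.oLineStyle(), new $.oFillStyle());
-*/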
-
-
-/**
- * Removes the contents of the art layer.
- */
-$.oArtLayer.prototype.clear = function(){
- var _shapes = this.shapes;
- this.$.debug(_shapes, this.$.DEBUG_LEVEL.DEBUG);
- for (var i=_shapes.length - 1; i>=0; i--){
- _shapes[i].remove();
- }
-}
-
-
-/**
- * get a shape from the artLayer by its index
- * @param {int} index
- *
- * @return {$.oShape}
- */
-$.oArtLayer.prototype.getShapeByIndex = function (index) {
- return new this.$.oShape(index, this);
-}
-
-
-/**
- * @private
- */
-$.oArtLayer.prototype.toString = function(){
- return "Object $.oArtLayer ["+this.name+"]";
-}
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oLineStyle class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oLineStyle class.
- * @constructor
- * @classdesc
- * The $.oLineStyle class describes a lineStyle used to describe the appearance of strokes and perform drawing operations.
- * Initializing a $.oLineStyle without any parameters attempts to get the current pencil thickness settings and color.
- * @param {string} colorId the color Id to paint the line with.
- * @param {$.oStencil} stencil the stencil object representing the thickness keys
- */
-$.oLineStyle = function (colorId, stencil) {
- if (typeof minThickness === 'undefined') var minThickness = PenstyleManager.getCurrentPenstyleMinimumSize();
- if (typeof maxThickness === 'undefined') {
- var maxThickness = PenstyleManager.getCurrentPenstyleMaximumSize();
- if (!maxThickness && !minThickness) maxThickness = 1;
- }
- if (typeof stencil === 'undefined') {
- var stencil = new $.oStencil("", "pencil", {maxThickness:maxThickness, minThickness:minThickness, keys:[]});
- }
-
- if (typeof colorId === 'undefined'){
- var _palette = this.$.scn.selectedPalette;
- if (_palette) {
- var _color = this.$.scn.selectedPalette.currentColor;
- if (_color) {
- var colorId = _color.id;
- } else{
- var colorId = "0000000000000003";
- }
- }
- }
-
- this.colorId = colorId;
- this.stencil = stencil;
-
- // this.$.debug(colorId+" "+minThickness+" "+maxThickness+" "+stencil, this.$.DEBUG_LEVEL.DEBUG)
-}
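-
-/* Illustrative sketch: a line style built from an explicit color id (the id used here is
-   the same fallback id used by the constructor) with thicknesses adjusted through the
-   stencil-backed properties below:
-
-     var _style = new $.oLineStyle("0000000000000003");
-     _style.minThickness = 1;
-     _style.maxThickness = 5;
-*/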
-
-
-/**
- * The minimum thickness of the line using this lineStyle
- * @name $.oLineStyle#minThickness
- * @type {float}
- */
-Object.defineProperty($.oLineStyle.prototype, "minThickness", {
- get: function(){
- return this.stencil.minThickness;
- },
-
- set: function(newMinThickness){
- this.stencil.minThickness = newMinThickness;
- }
-})
-
-
-/**
- * The maximum thickness of the line using this lineStyle
- * @name $.oLineStyle#maxThickness
- * @type {float}
- */
-Object.defineProperty($.oLineStyle.prototype, "maxThickness", {
- get: function(){
- return this.stencil.maxThickness;
- },
-
- set: function(newMaxThickness){
- this.stencil.maxThickness = newMaxThickness;
- }
-})
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oShape class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oShape class. These types of objects are not supported for harmony versions < 16
- * @constructor
- * @classdesc $.oShape represents shapes drawn on the art layer. Strokes, colors, line styles, can be accessed through this class.
- * Warning, Toonboom stores strokes by index, so stroke objects may become obsolete when modifying the contents of the drawing.
- * @param {int} index The index of the shape on the artLayer
- * @param {$.oArtLayer} oArtLayerObject The oArtLayer this layer belongs to.
- *
- * @property {int} index the index of the shape in the parent artLayer
- * @property {$.oArtLayer} artLayer the art layer that contains this shape
- */
-$.oShape = function (index, oArtLayerObject) {
- this.index = index;
- this.artLayer = oArtLayerObject;
-}
-
-
-/**
- * the toonboom key object identifying this shape.
- * @name $.oShape#_key
- * @type {object}
- * @private
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, '_key', {
- get: function () {
- var _key = this.artLayer._key;
- return { drawing: _key.drawing, art: _key.art, layers: [this.index] };
- }
-})
-
-
-/**
- * The underlying data describing the shape.
- * @name $.oShape#_data
- * @type {object}
- * @readonly
- * @private
- */
-Object.defineProperty($.oShape.prototype, '_data', {
- get: function () {
- return this.artLayer.drawingData.layers[this.index];
- }
-})
-
-
-/**
- * The strokes making up the shape.
- * @name $.oShape#strokes
- * @type {$.oStroke[]}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'strokes', {
- get: function () {
- if (!this.hasOwnProperty("_strokes")) {
- var _data = this._data;
-
- if (!_data.hasOwnProperty("strokes")) return [];
-
- var _shape = this;
- var _strokes = _data.strokes.map(function (x, idx) { return new _shape.$.oStroke(idx, x, _shape) })
- this._strokes = _strokes;
- }
- return this._strokes;
- }
-})
-
-
-/**
- * The contours (invisible strokes that can delimit colored areas) making up the shape.
- * @name $.oShape#contours
- * @type {$.oContour[]}
- * @readonly
- */
- Object.defineProperty($.oShape.prototype, 'contours', {
- get: function () {
- if (!this.hasOwnProperty("_contours")) {
- var _data = this._data
-
- if (!_data.hasOwnProperty("contours")) return [];
-
- var _shape = this;
-      var _contours = _data.contours.map(function (x, idx) { return new _shape.$.oContour(idx, x, _shape) })
- this._contours = _contours;
- }
- return this._contours;
- }
-})
-
-
-/**
- * The fills styles contained in the shape
- * @name $.oShape#fills
- * @type {$.oFillStyle[]}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'fills', {
- get: function () {
- if (!this.hasOwnProperty("_fills")) {
- var _data = this._data
-
- if (!_data.hasOwnProperty("contours")) return [];
-
-      var _shape = this;
-      var _fills = _data.contours.map(function (x) { return new _shape.$.oFillStyle(x.colorId, x.matrix) })
- this._fills = _fills;
- }
- return this._fills;
- }
-})
-
-/**
- * The stencils used by the shape.
- * @name $.oShape#stencils
- * @type {$.oStencil[]}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'stencils', {
- get: function () {
- if (!this.hasOwnProperty("_stencils")) {
- var _data = this._data;
- var _shape = this;
- var _stencils = _data.thicknessPaths.map(function (x) { return new _shape.$.oStencil("", "pencil", x) })
- this._stencils = _stencils;
- }
- return this._stencils;
- }
-})
-
-
-/**
- * The bounding box of the shape.
- * @name $.oShape#bounds
- * @type {$.oBox}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'bounds', {
- get: function () {
- var _bounds = new this.$.oBox();
- var _contours = this.contours;
- var _strokes = this.strokes;
-
- for (var i in _contours){
- _bounds.include(_contours[i].bounds);
- }
-
- for (var i in _strokes){
- _bounds.include(_strokes[i].bounds);
- }
-
- return _bounds;
- }
-})
-
-/**
- * The x coordinate of the shape.
- * @name $.oShape#x
- * @type {float}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'x', {
- get: function () {
- return this.bounds.left;
- }
-})
-
-
-/**
- * The y coordinate of the shape.
- * @name $.oShape#y
- * @type {float}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'y', {
- get: function () {
- return this.bounds.top;
- }
-})
-
-
-/**
- * The width of the shape.
- * @name $.oShape#width
- * @type {float}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'width', {
- get: function () {
- return this.bounds.width;
- }
-})
-
-
-/**
- * The height of the shape.
- * @name $.oShape#height
- * @type {float}
- * @readonly
- */
-Object.defineProperty($.oShape.prototype, 'height', {
- get: function () {
- return this.bounds.height;
- }
-})
-
-
-/**
- * Retrieve and set the selected status of each shape.
- * @name $.oShape#selected
- * @type {bool}
- */
-Object.defineProperty($.oShape.prototype, 'selected', {
- get: function () {
-    var _selection = this.artLayer.selectedShapes;
- var _indices = _selection.map(function (x) { return x.index });
- return (_indices.indexOf(this.index) != -1)
- },
- set: function (newSelectedState) {
- var _key = this.artLayer._key;
-
- var currentSelection = Drawing.selection.get(_key);
- var config = {drawing:_key.drawing, art:_key.art};
-
- if (newSelectedState){
- // adding elements to selection
- config.selectedLayers = currentSelection.selectedLayers.concat([this.index]);
- config.selectedStrokes = currentSelection.selectedStrokes;
- }else{
- config.selectedLayers = currentSelection.selectedLayers;
- config.selectedStrokes = currentSelection.selectedStrokes;
-
- // remove current element from selection before setting again
- for (var i=config.selectedLayers.length-1; i>=0; i--){
- if (config.selectedLayers[i] == this.index) config.selectedLayers.splice(i, 1);
- }
- for (var i=config.selectedStrokes.length-1; i>=0; i--){
- if (config.selectedStrokes[i].layer == this.index) config.selectedStrokes.splice(i, 1);
- }
- }
-
- Drawing.selection.set(config);
- }
-})
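-
-/* Illustrative sketch: selecting every shape of an assumed drawing's line art layer
-   through the property above:
-
-     var _shapes = myDrawing.lineArt.shapes;
-     for (var i in _shapes){
-       _shapes[i].selected = true;
-     }
-*/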
-
-
-/**
- * Deletes the shape from its artlayer.
- * Updates the index of all other oShapes on the artLayer in order to
- * keep tracking all of them without having to query the drawing again.
- */
-$.oShape.prototype.remove = function(){
- DrawingTools.deleteLayers(this._key);
-
- // update shapes list for this artLayer
- var shapes = this.artLayer.shapes
- for (var i in shapes){
- if (i > this.index){
- shapes[i].index--;
- }
- }
- shapes.splice(this.index, 1);
-}
-
-
-/**
- * Deletes the shape from its artlayer.
- * Warning : Because shapes are referenced by index, deleting a shape
- * that isn't at the end of the list of shapes from this layer
- * might render other shape objects from this layer obsolete.
- * Get them again with artlayer.shapes.
- * @deprecated use oShape.remove instead
- */
-$.oShape.prototype.deleteShape = function(){
- this.remove();
-}
-
-
-/**
- * Gets a stroke from this shape by its index
- * @param {int} index
- *
- * @returns {$.oStroke}
- */
-$.oShape.prototype.getStrokeByIndex = function (index) {
- return this.strokes[index];
-}
-
-
-$.oShape.prototype.toString = function (){
-  return "Object $.oShape ["+this.index+"]";
-}
-
-/**
- * The constructor for the $.oFillStyle class.
- * @constructor
- * @classdesc
- * Initializing a $.oFillStyle without any parameters attempts to get the current color id.
- * @param {string} colorId the color Id to paint the line with.
- * @param {object} fillMatrix
- */
-$.oFillStyle = function (colorId, fillMatrix) {
- if (typeof fillMatrix === 'undefined') var fillMatrix = {
- "ox": 1,
- "oy": 1,
- "xx": 1,
- "xy": 0,
- "yx": 0,
- "yy": 1
- }
-
- if (typeof colorId === 'undefined'){
- var _palette = this.$.scn.selectedPalette;
- if (_palette) {
- var _color = this.$.scn.selectedPalette.currentColor;
- if (_color) {
- var colorId = _color.id;
- } else{
- var colorId = "0000000000000003";
- }
- }
- }
-
- this.colorId = colorId;
- this.fillMatrix = fillMatrix;
-
- this.$.log("new fill created: " + colorId + " " + JSON.stringify(this.fillMatrix))
-}
-
-
-$.oFillStyle.prototype.toString = function(){
-  return "Object $.oFillStyle ["+this.colorId+"]";
-}
-
-/**
- * The constructor for the $.oContour class.
- * @constructor
- * @classdesc
- * $.oContour is a subclass of $.oStroke and shares its properties, but represents a stroke with a fill.
- * @extends $.oStroke
- * @param {int} index The index of the contour in the shape.
- * @param {object} contourObject The stroke object descriptor that contains the info for the stroke
- * @param {$.oShape} oShapeObject The parent oShape
- *
- * @property {int} index the index of the stroke in the parent shape
- * @property {$.oShape} shape the shape that contains this stroke
- * @property {$.oArtLayer} artLayer the art layer that contains this stroke
- */
-$.oContour = function (index, contourObject, oShapeObject) {
- this.$.oStroke.call(this, index, contourObject, oShapeObject)
-}
-$.oContour.prototype = Object.create($.oStroke.prototype)
-
-
-/**
- * The information about the fill of this contour
- * @name $.oContour#fill
- * @type {$.oFillStyle}
- */
-Object.defineProperty($.oContour.prototype, "fill", {
- get: function () {
- var _data = this._data;
- return new this.$.oFillStyle(_data.colorId, _data.matrix);
- }
-})
-
-
-/**
- * The bounding box of the contour.
- * @name $.oContour#bounds
- * @type {$.oBox}
- * @readonly
- */
- Object.defineProperty($.oContour.prototype, 'bounds', {
- get: function () {
- var _data = this._data;
- var _box = _data.box;
- var _bounds = new this.$.oBox(_box.x0,_box.y0, _box.x1, _box.y1);
- return _bounds;
- }
-})
-
-/**
- * @private
- */
-$.oContour.prototype.toString = function(){
-  return "Object $.oContour ["+this.index+"]";
-}
-
-/**
- * Harmony stencils can have the following types: "pencil", "penciltemplate", "brush", "texture", "bitmapbrush" and "bitmaperaser". Each type is only available to specific tools.
- * Access the main size information of the brush with the mainBrushShape property.
- * @param {string} xmlDescription the part of the penstyles.xml file between
- */
-
-/**
- * Elements hold the drawings displayed by a "READ" Node or Drawing Node. They can be used to create new drawings, rename them, etc.
- * @constructor
- * @classdesc $.oElement Class
- * @param {int} id The element ID.
- * @param {$.oColumn} oColumnObject The column object associated to the element.
- *
- * @property {int} id The element ID.
- * @property {$.oColumn} oColumnObject The column object associated to the element.
- */
-$.oElement = function( id, oColumnObject){
- this._type = "element";
-
- this.id = id;
- this.column = oColumnObject;
-}
-
-// $.oElement Object Properties
-
-/**
- * The name of the element.
- * @name $.oElement#name
- * @type {string}
- */
-Object.defineProperty($.oElement.prototype, 'name', {
- get : function(){
- return element.getNameById(this.id)
- },
-
- set : function(newName){
- element.renameById(this.id, newName);
- }
-})
-
-
-/**
- * The folder path of the element on the filesystem.
- * @name $.oElement#path
- * @type {string}
- */
-Object.defineProperty($.oElement.prototype, 'path', {
- get : function(){
- return fileMapper.toNativePath(element.completeFolder(this.id))
- }
-})
-
-
-/**
- * The drawings available in the element.
- * @name $.oElement#drawings
- * @type {$.oDrawing[]}
- */
-Object.defineProperty($.oElement.prototype, 'drawings', {
- get : function(){
- var _drawingsNumber = Drawing.numberOf(this.id);
- var _drawings = [];
- for (var i=0; i<_drawingsNumber; i++){
- _drawings.push( new this.$.oDrawing(Drawing.name(this.id, i), this) );
- }
- return _drawings;
- }
-})
-
-
-/**
- * The file format of the element.
- * @name $.oElement#format
- * @type {string}
- */
-Object.defineProperty($.oElement.prototype, 'format', {
- get : function(){
- var _type = element.pixmapFormat(this.id);
- if (element.vectorType(this.id)) _type = "TVG";
- return _type;
- }
-})
-
-
-/**
- * The palettes linked to this element.
- * @name $.oElement#palettes
- * @type {$.oPalette[]}
- */
-Object.defineProperty($.oElement.prototype, 'palettes', {
- get: function(){
- var _paletteList = PaletteObjectManager.getPaletteListByElementId(this.id);
- var _palettes = [];
- for (var i=0; i<_paletteList.numPalettes; i++){
- _palettes.push( new this.$.oPalette( _paletteList.getPaletteByIndex(i), _paletteList ) );
- }
-
- return _palettes;
- }
-})
-
-
-// $.oElement Class methods
-
-/**
- * Adds a drawing to the element. Provide a filename to import an external file as a drawing.
- * @param {int} [atFrame=1] The frame at which to add the drawing on the $.oDrawingColumn. Values < 1 create no exposure.
- * @param {string} [name] The name of the drawing to add.
- * @param {string} [filename] Optionally, a path for a drawing file to use for this drawing. Can pass an oFile object as well.
- * @param {bool} [convertToTvg=false] If the filename isn't a tvg file, specify if you want it converted (this doesn't vectorize the drawing).
- *
- * @return {$.oDrawing} The added drawing
- */
-$.oElement.prototype.addDrawing = function( atFrame, name, filename, convertToTvg ){
- if (typeof atFrame === 'undefined') var atFrame = 1;
- if (typeof filename === 'undefined') var filename = null;
- var nameByFrame = this.$.app.preferences.XSHEET_NAME_BY_FRAME;
- if (typeof name === 'undefined') var name = nameByFrame?atFrame:1;
- var name = name +""; // convert name to string
-
- // ensure a new drawing is always created by incrementing depending on preference
- var _drawingNames = this.drawings.map(function(x){return x.name}); // index of existing names
- var _nameFormat = /(.*?)_(\d+)$/
- while (_drawingNames.indexOf(name) != -1){
- if (nameByFrame || isNaN(name)){
- var nameGroups = name.match(_nameFormat);
- if (nameGroups){
- // increment the part after the underscore
- name = nameGroups[1] + "_" + (parseInt(nameGroups[2])+1);
- }else{
- name += "_1";
- }
- }else{
- name = parseInt(name, 10);
- if (isNaN(name)) name = 0;
- name = name + 1 + ""; // increment and convert back to string
- }
- }
-
- if (!(filename instanceof this.$.oFile)) filename = new this.$.oFile(filename);
- var _fileExists = filename.exists;
- Drawing.create (this.id, name, _fileExists, true);
-
- var _drawing = new this.$.oDrawing( name, this );
-
- if (_fileExists) _drawing.importBitmap(filename, convertToTvg);
-
- // place drawing on the column at the provided frame
-  if (this.column && atFrame >= 1){
- column.setEntry(this.column.uniqueName, 1, atFrame, name);
- }
-
- return _drawing;
-}
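-
-/* Illustrative sketch: assuming `myElement` is an existing $.oElement linked to a column,
-   a new drawing can be created, exposed on frame 5 and filled with an imported bitmap:
-
-     var _drawing = myElement.addDrawing(5, "BG", "C:/temp/background.png", true);
-*/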
-
-
-/**
- * Gets a drawing object by the name.
- * @param {string} name The name of the drawing to get.
- *
- * @return {$.oDrawing} The drawing found by the search
- */
-$.oElement.prototype.getDrawingByName = function ( name ){
- var _drawings = this.drawings;
- for (var i in _drawings){
- if (_drawings[i].name == name) return _drawings[i];
- }
- return null;
-}
-
-
-/**
- * Link a provided palette to an element as an Element palette.
- * @param {$.oPalette} oPaletteObject The oPalette object to link
- * @param {int} [listIndex] The index in the element palette list at which to add the newly linked palette
- * @return {$.oPalette} The linked element palette.
- */
-$.oElement.prototype.linkPalette = function ( oPaletteObject , listIndex){
- var _paletteList = PaletteObjectManager.getPaletteListByElementId(this.id);
- if (typeof listIndex === 'undefined') var listIndex = _paletteList.numPalettes;
-
- var _palettePath = oPaletteObject.path.path.replace(".plt", "");
-
- var _palette = new this.$.oPalette(_paletteList.insertPalette (_palettePath, listIndex), _paletteList);
- return _palette;
-}
-
-
-/**
- * If the palette passed as a parameter is linked to this element, it will be unlinked, and moved to the scene palette list.
- * @param {$.oPalette} oPaletteObject
- * @return {bool} the success of the unlinking process.
- */
-$.oElement.prototype.unlinkPalette = function (oPaletteObject) {
- var _palettes = this.palettes;
- var _ids = _palettes.map(function(x){return x.id});
- var _paletteId = oPaletteObject.id;
- var _paletteIndex = _ids.indexOf(_paletteId);
-
- if (_paletteIndex == -1) return; // palette already isn't linked
-
- var _palette = _palettes[_paletteIndex];
- try{
- _palette.remove(false);
- return true;
- }catch(err){
- this.$.debug("Failed to unlink palette "+_palette.name+" from element "+this.name);
- return false;
- }
-}
-
-
-
-/**
- * Duplicate an element.
- * @param {string} [name] The new name for the duplicated element.
- * @return {$.oElement} The duplicate element
- */
-$.oElement.prototype.duplicate = function(name){
- if (typeof name === 'undefined') var name = this.name;
-
- var _fieldGuide = element.fieldChart(this.id);
- var _scanType = element.scanType(this.id);
-
- var _duplicateElement = this.$.scene.addElement(name, this.format, _fieldGuide, _scanType);
-
- var _drawings = this.drawings;
- var _elementFolder = new this.$.oFolder(_duplicateElement.path);
-
- for (var i in _drawings){
- var _drawingFile = new this.$.oFile(_drawings[i].path);
- try{
- var duplicateDrawing = _duplicateElement.addDrawing(0, _drawings[i].name, _drawingFile);
- _drawingFile.copy(_elementFolder, duplicateDrawing.name, true);
- }catch(err){
- this.debug("could not copy drawing file "+drawingFile.name+" into element "+_duplicateElement.name, this.DEBUG_LEVEL.ERROR);
- }
- }
- return _duplicateElement;
-}
\ No newline at end of file
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_file.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_file.js
deleted file mode 100644
index 50e4b0d475..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_file.js
+++ /dev/null
@@ -1,855 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-// Toonboom API which must remains available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oFolder class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The $.oFolder helper class -- providing utilities for folder manipulation and access.
- * @constructor
- * @classdesc $.oFolder Base Class
- * @param {string} path The path to the folder.
- *
- * @property {string} path The path to the folder.
- */
-$.oFolder = function(path){
- this._type = "folder";
- this._path = fileMapper.toNativePath(path).split("\\").join("/");
-
- // fix lowercase drive letter
- var path_components = this._path.split("/");
- if (path_components[0] && about.isWindowsArch()){
- // local path that starts with a drive letter
- path_components[0] = path_components[0].toUpperCase()
- this._path = path_components.join("/");
- }
-}
-
-
-/**
- * The path of the folder. Setting a path doesn't move the file, only changes where the file object is pointing.
- * @name $.oFolder#path
- * @type {string}
- */
-Object.defineProperty($.oFolder.prototype, 'path', {
- get: function(){
- return this._path;
- },
- set: function( newPath ){
- this._path = fileMapper.toNativePath( newPath ).split("\\").join("/");
- }
-});
-
-
-/**
- * The path of the file encoded as a toonboom relative path.
- * @name $.oFile#toonboomPath
- * @readonly
- * @type {string}
- */
-Object.defineProperty( $.oFolder.prototype, 'toonboomPath', {
- get: function(){
- var _path = this._path;
- if (!this.$.scene.online) return _path;
- if (_path.slice(0,2) != ("//")) return _path;
-
- var _pathComponents = _path.replace("//", "").split("/");
- var _drive = (_pathComponents[1]=="usadata000")?_pathComponents[1]:_pathComponents[1].toUpperCase();
- var _path = _pathComponents.slice(2);
-
- return ["",_drive].concat(_path).join("/");
- }
-});
-
-
-/**
- * The name of the folder.
- * @name $.oFolder#name
- * @type {string}
- */
-Object.defineProperty($.oFolder.prototype, 'name', {
- get: function(){
- var _name = this.path.split("/");
- _name = _name.pop();
- return _name;
- },
- set: function(newName){
- this.rename(newName)
- }
-});
-
-
-/**
- * The parent folder.
- * @name $.oFolder#folder
- * @type {$.oFolder}
- */
-Object.defineProperty($.oFolder.prototype, 'folder', {
- get: function(){
- var _folder = this.path.slice(0,this.path.lastIndexOf("/", this.path.length-2));
- return new this.$.oFolder(_folder);
- }
-});
-
-
-/**
- * Whether the folder exists.
- * @name $.oFolder#exists
- * @type {bool}
- */
-Object.defineProperty($.oFolder.prototype, 'exists', {
- get: function(){
- var dir = new QDir;
- dir.setPath(this.path)
- return dir.exists();
- }
-});
-
-
-/**
- * The files in the folder.
- * @name $.oFolder#files
- * @type {$.oFile[]}
- * @deprecated use oFolder.getFiles() instead to specify filter
- */
-Object.defineProperty($.oFolder.prototype, 'files', {
- get: function(){
- var dir = new QDir;
- dir.setPath(this.path);
- dir.setFilter( QDir.Files );
-
-    if (!dir.exists()) throw new Error("can't get files from folder "+this.path+" because it doesn't exist");
-
- return dir.entryList().map(function(x){return new this.$.oFile(dir.path()+"/"+x)});
- }
-});
-
-
-/**
- * The folders within this folder.
- * @name $.oFolder#folders
- * @type {$.oFolder[]}
- * @deprecated oFolder.folder is the containing parent folder, it can't also mean the children folders
- */
-Object.defineProperty($.oFolder.prototype, 'folders', {
- get: function(){
- var _dir = new QDir;
- _dir.setPath(this.path);
-    if (!_dir.exists()) throw new Error("can't get files from folder "+this.path+" because it doesn't exist");
- _dir.setFilter(QDir.Dirs);
- var _folders = _dir.entryList();
-
- for (var i = _folders.length-1; i>=0; i--){
- if (_folders[i] == "." || _folders[i] == "..") _folders.splice(i,1);
- }
-
- return _folders.map(function(x){return new this.$.oFolder( _dir.path() + "/" + x )});
- }
-});
-
-
-/**
- * The content within the folder -- both folders and files.
- * @name $.oFolder#content
- * @type {$.oFile/$.oFolder[] }
- */
-Object.defineProperty($.oFolder.prototype, 'content', {
- get: function(){
- var content = this.files;
- content = content.concat( this.folders );
- return content;
- }
-});
-
-
-/**
- * Lists the file names contained inside the folder.
- * @param {string} [filter] Filter wildcards for the content of the folder.
- *
- * @returns {string[]} The names of the files contained in the folder that match the filter.
- */
-$.oFolder.prototype.listFiles = function(filter){
- if (typeof filter === 'undefined') var filter = "*";
-
- var _dir = new QDir;
- _dir.setPath(this.path);
-  if (!_dir.exists()) throw new Error("can't get files from folder "+this.path+" because it doesn't exist");
- _dir.setNameFilters([filter]);
- _dir.setFilter( QDir.Files);
- var _files = _dir.entryList();
-
- return _files;
-}
-
-
-/**
- * get the files from the folder
- * @param {string} [filter] Filter wildcards for the content of the folder.
- *
- * @returns {$.oFile[]} A list of files contained in the folder as oFile objects.
- */
-$.oFolder.prototype.getFiles = function( filter ){
- if (typeof filter === 'undefined') var filter = "*";
- // returns the list of $.oFile in a directory that match a filter
-
- var _path = this.path;
-
- var _files = [];
- var _file_list = this.listFiles(filter);
- for( var i in _file_list){
- _files.push( new this.$.oFile( _path+'/'+_file_list[i] ) );
- }
-
- return _files;
-}
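-
-/* Illustrative sketch: collecting the TGA files of a folder and removing them, similar to
-   what $.oDrawing.refreshPreview does with the element's preview files:
-
-     var _folder = new $.oFolder("C:/temp/renders");
-     var _tgas = _folder.getFiles("*.tga");
-     for (var i in _tgas){
-       _tgas[i].remove();
-     }
-*/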
-
-
-/**
- * lists the folder names contained inside the folder.
- * @param {string} [filter="*.*"] Filter wildcards for the content of the folder.
- *
- * @returns {string[]} The names of the files contained in the folder that match the filter.
- */
-$.oFolder.prototype.listFolders = function(filter){
-
- if (typeof filter === 'undefined') var filter = "*";
-
- var _dir = new QDir;
- _dir.setPath(this.path);
-
-  if (!_dir.exists()){
- this.$.debug("can't get files from folder "+this.path+" because it doesn't exist", this.$.DEBUG_LEVEL.ERROR);
- return [];
- }
-
- _dir.setNameFilters([filter]);
- _dir.setFilter(QDir.Dirs); //QDir.NoDotAndDotDot not supported?
- var _folders = _dir.entryList();
-
- _folders = _folders.filter(function(x){return x!= "." && x!= ".."})
-
- return _folders;
-}
-
-
-/**
- * gets the folders inside the oFolder
- * @param {string} [filter] Filter wildcards for the content of the folder.
- *
- * @returns {$.oFolder[]} A list of folders contained in the folder, as oFolder objects.
- */
-$.oFolder.prototype.getFolders = function( filter ){
- if (typeof filter === 'undefined') var filter = "*";
- // returns the list of $.oFile in a directory that match a filter
-
- var _path = this.path;
-
- var _folders = [];
- var _folders_list = this.listFolders(filter);
- for( var i in _folders_list){
- _folders.push( new this.$.oFolder(_path+'/'+_folders_list[i]));
- }
-
- return _folders;
-}
-
-
- /**
- * Creates the folder, if it doesn't already exist.
- * @returns { bool } The existence of the newly created folder.
- */
-$.oFolder.prototype.create = function(){
- if( this.exists ){
- this.$.debug("folder "+this.path+" already exists and will not be created", this.$.DEBUG_LEVEL.WARNING)
- return true;
- }
-
- var dir = new QDir(this.path);
-
-  dir.mkpath(this.path);
-  if (!this.exists) throw new Error ("folder " + this.path + " could not be created.")
-  return true;
-}
-
-
-/**
- * Copy the folder and its contents to another path.
- * @param {string} folderPath The path to an existing folder in which to copy this folder. (Can provide an oFolder)
- * @param {string} [copyName] Optionally, a name for the folder copy, if different from the original
- * @param {bool} [overwrite=false] Whether to overwrite the files that are already present at the copy location.
- * @returns {$.oFolder} the oFolder describing the newly created copy.
- */
-$.oFolder.prototype.copy = function( folderPath, copyName, overwrite ){
- // TODO: it should propagate errors from the recursive copy and throw them before ending?
- if (typeof overwrite === 'undefined') var overwrite = false;
- if (typeof copyName === 'undefined' || !copyName) var copyName = this.name;
- if (!(folderPath instanceof this.$.oFolder)) folderPath = new $.oFolder(folderPath);
- if (this.name == copyName && folderPath == this.folder.path) copyName += "_copy";
-
- if (!folderPath.exists) throw new Error("Target folder " + folderPath +" doesn't exist. Can't copy folder "+this.path)
-
- var nextFolder = new $.oFolder(folderPath.path + "/" + copyName);
- nextFolder.create();
- var files = this.getFiles();
- for (var i in files){
- var _file = files[i];
- var targetFile = new $.oFile(nextFolder.path + "/" + _file.fullName);
-
- // deal with overwriting
- if (targetFile.exists && !overwrite){
- this.$.debug("File " + targetFile + " already exists, skipping copy of "+ _file, this.$.DEBUG_LEVEL.ERROR);
- continue;
- }
-
- _file.copy(nextFolder, undefined, overwrite);
- }
- var folders = this.getFolders();
- for (var i in folders){
- folders[i].copy(nextFolder, undefined, overwrite);
- }
-
- return nextFolder;
-}
-
-
-/**
- * Move this folder to the specified path.
- * @param {string} destFolderPath The new complete path of the folder after the move
- * @param {bool} [overwrite=false] Whether to overwrite the target.
- *
- * @return { bool } The result of the move.
- * @todo implement with Robocopy
- */
-$.oFolder.prototype.move = function( destFolderPath, overwrite ){
- if (typeof overwrite === 'undefined') var overwrite = false;
-
- if (destFolderPath instanceof this.$.oFolder) destFolderPath = destFolderPath.path;
-
- var dir = new Dir;
- dir.path = destFolderPath;
-
- if (dir.exists && !overwrite)
- throw new Error("destination file "+dir.path+" exists and will not be overwritten. Can't move folder.");
-
- var path = fileMapper.toNativePath(this.path);
- var destPath = fileMapper.toNativePath(dir.path+"/");
-
- var destDir = new Dir;
- try {
- destDir.rename( path, destPath );
- this._path = destPath;
-
- return true;
- }catch (err){
- throw new Error ("Couldn't move folder "+this.path+" to new address "+destPath + ": " + err);
- }
-}
-
-
-/**
- * Move this folder to a different parent folder, while retaining its content and base name.
- * @param {string} destFolderPath The path of the destination to copy the folder into.
- * @param {bool} [overwrite=false] Whether to overwrite the target. Default is false.
- *
- * @return: { bool } The result of the move.
- */
-$.oFolder.prototype.moveToFolder = function( destFolderPath, overwrite ){
- destFolderPath = (destFolderPath instanceof this.$.oFolder)?destFolderPath:new this.$.oFolder(destFolderPath)
-
- var folder = destFolderPath.path;
- var name = this.name;
-
- this.move(folder+"/"+name, overwrite);
-}
-
-
-/**
- * Renames the folder
- * @param {string} newName
- */
-$.oFolder.prototype.rename = function(newName){
- var destFolderPath = this.folder.path+"/"+newName
- if ((new this.$.oFolder(destFolderPath)).exists) throw new Error("Can't rename folder "+this.path + " to "+newName+", a folder already exists at this location")
-
- this.move(destFolderPath)
-}
-
-
-/**
- * Deletes the folder.
- * @param {bool} removeContents Whether to check if the folder contains files before deleting.
- */
-$.oFolder.prototype.remove = function (removeContents){
- if (typeof removeContents === 'undefined') var removeContents = false;
-
-  if ((this.listFiles().length > 0 || this.listFolders().length > 0) && !removeContents) throw new Error("Can't remove folder "+this.path+", it is not empty.")
- var _folder = new Dir(this.path);
- _folder.rmdirs();
-}
-
-
-/**
- * Get the sub folder or file by name.
- * @param {string} destName The sub name of a folder or file within this directory.
- * @return: {$.oFolder/$.oFile} The resulting oFile or oFolder.
- */
-$.oFolder.prototype.get = function( destName ){
- var new_path = this.path + "/" + destName;
- var new_folder = new $.oFolder( new_path );
- if( new_folder.exists ){
- return new_folder;
- }
-
- var new_file = new $.oFile( new_path );
- if( new_file.exists ){
- return new_file;
- }
-
- return false;
-}
-
-
- /**
- * Used in converting the folder to a string value, provides the string-path.
- * @return {string} The folder path's as a string.
- */
-$.oFolder.prototype.toString = function(){
- return this.path;
-}
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oFile class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The $.oFile helper class -- providing utilities for file manipulation and access.
- * @constructor
- * @classdesc $.oFile Base Class
- * @param {string} path The path to the file.
- *
- * @property {string} path The path to the file.
- */
-$.oFile = function(path){
- this._type = "file";
- this._path = fileMapper.toNativePath(path).split('\\').join('/');
-
- // fix lowercase drive letter
- var path_components = this._path.split("/");
- if (path_components[0] && about.isWindowsArch()){
- // local path that starts with a drive letter
- path_components[0] = path_components[0].toUpperCase()
- this._path = path_components.join("/");
- }
-}
-
-
-/**
- * The name of the file with extension.
- * @name $.oFile#fullName
- * @type {string}
- */
-Object.defineProperty($.oFile.prototype, 'fullName', {
- get: function(){
- var _name = this.path.slice( this.path.lastIndexOf("/")+1 );
- return _name;
- }
-});
-
-
-/**
- * The name of the file without extension.
- * @name $.oFile#name
- * @type {string}
- */
-Object.defineProperty($.oFile.prototype, 'name', {
- get: function(){
- var _fullName = this.fullName;
- if (_fullName.indexOf(".") == -1) return _fullName;
-
- var _name = _fullName.slice(0, _fullName.lastIndexOf("."));
- return _name;
- },
- set: function(newName){
- this.rename(newName)
- }
-});
-
-
-/**
- * The extension of the file.
- * @name $.oFile#extension
- * @type {string}
- */
-Object.defineProperty($.oFile.prototype, 'extension', {
- get: function(){
- var _fullName = this.fullName;
- if (_fullName.indexOf(".") == -1) return "";
-
- var _extension = _fullName.slice(_fullName.lastIndexOf(".")+1);
- return _extension;
- }
-});
-
-
-/**
- * The folder containing the file.
- * @name $.oFile#folder
- * @type {$.oFolder}
- */
-Object.defineProperty($.oFile.prototype, 'folder', {
- get: function(){
- var _folder = this.path.slice(0,this.path.lastIndexOf("/"));
- return new this.$.oFolder(_folder);
- }
-});
-
-
-/**
- * Whether the file exists already.
- * @name $.oFile#exists
- * @type {bool}
- */
-Object.defineProperty($.oFile.prototype, 'exists', {
- get: function(){
- var _file = new File( this.path );
- return _file.exists;
- }
-})
-
-
-/**
- * The path of the file. Setting a path doesn't move the file, only changes where the file object is pointing.
- * @name $.oFile#path
- * @type {string}
- */
-Object.defineProperty( $.oFile.prototype, 'path', {
- get: function(){
- return this._path;
- },
-
- set: function( newPath ){
- this._path = fileMapper.toNativePath( newPath ).split("\\").join("/");
- }
-});
-
-
-/**
- * The path of the file encoded as a toonboom relative path.
- * @name $.oFile#toonboomPath
- * @readonly
- * @type {string}
- */
-Object.defineProperty( $.oFile.prototype, 'toonboomPath', {
- get: function(){
- var _path = this._path;
- if (!this.$.scene.online) return _path;
- if (_path.slice(0,2) != ("//")) return _path;
-
- var _pathComponents = _path.replace("//", "").split("/");
- var _drive = (_pathComponents[1]=="usadata000")?_pathComponents[1]:_pathComponents[1].toUpperCase();
- var _path = _pathComponents.slice(2);
-
- return ["",_drive].concat(_path).join("/");
- }
-});
-
-
-//Todo, Size, Date Created, Date Modified
-
-
-/**
- * Reads the content of the file.
- *
- * @return: { string } The contents of the file.
- */
-$.oFile.prototype.read = function() {
- var file = new File(this.path);
-
- try {
- if (file.exists) {
- file.open(FileAccess.ReadOnly);
- var string = file.read();
- file.close();
- return string;
- }
- } catch (err) {
-    this.$.debug(err, this.$.DEBUG_LEVEL.ERROR);
-    return null;
- }
-}
-
-
-/**
- * Writes to the file.
- * @param {string} content Content to write to the file.
- * @param {bool} [append=false] Whether to append to the file.
- */
-$.oFile.prototype.write = function(content, append){
- if (typeof append === 'undefined') var append = false
-
- var file = new File(this.path);
- try {
- if (append){
- file.open(FileAccess.Append);
- }else{
- file.open(FileAccess.WriteOnly);
- }
- file.write(content);
- file.close();
- return true
- } catch (err) {return false;}
-}
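// A minimal read/write sketch for $.oFile, assuming specialFolders.temp points to a writable
// temporary location; the file name is hypothetical.
var logFile = new $.oFile(specialFolders.temp + "/openHarmony_example.log");
logFile.write("first line\n");        // overwrites (or creates) the file
logFile.write("second line\n", true); // appends instead of overwriting
$.log(logFile.read());                // prints both lines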
-
-
-/**
- * Moves the file to the specified path.
- * @param {string} newPath The new path for the file, including the file name.
- * @param {bool} [overwrite=false] Whether to overwrite the file.
- *
- * @return: { bool } The result of the move.
- */
-$.oFile.prototype.move = function( newPath, overwrite ){
- if (typeof overwrite === 'undefined') var overwrite = false;
-
- if(newPath instanceof this.$.oFile) newPath = newPath.path;
-
- var _file = new PermanentFile(this.path);
- var _dest = new PermanentFile(newPath);
- // this.$.alert("moving "+_file.path()+" to "+_dest.path()+" exists?"+_dest.exists())
-
- if (_dest.exists()){
- if (!overwrite){
- this.$.debug("destination file "+newPath+" exists and will not be overwritten. Can't move file.", this.$.DEBUG_LEVEL.ERROR);
- return false;
- }else{
- _dest.remove()
- }
- }
-
- var success = _file.move(_dest);
- // this.$.alert(success)
- if (success) {
- this.path = _dest.path()
- return this;
- }
- return false;
-}
-
-
- /**
- * Moves the file to the folder.
- * @param {string} folder destination folder for the file.
- * @param {bool} [overwrite=false] Whether to overwrite the file.
- *
- * @return: { bool } The result of the move.
- */
-$.oFile.prototype.moveToFolder = function( folder, overwrite ){
- if (folder instanceof this.$.oFolder) folder = folder.path;
- var _fileName = this.fullName;
-
- return this.move(folder+"/"+_fileName, overwrite)
-}
-
-
- /**
- * Renames the file.
- * @param {string} newName the new name for the file, without the extension.
- * @param {bool} [overwrite=false] Whether to replace a file of the same name if it exists in the folder.
- *
- * @return: { bool } The result of the renaming.
- */
-$.oFile.prototype.rename = function( newName, overwrite){
- if (newName == this.name) return true;
- if (this.extension != "") newName += "."+this.extension;
- return this.move(this.folder.path+"/"+newName, overwrite);
-}
-
-
-
-/**
- * Copies the file to the folder.
- * @param {string} [destfolder] The destination folder for the copy. If not specified, the copy is created in the same folder as the original.
- * @param {string} [copyName] Name of the copied file without the extension. If not specified, the copy will keep its name unless another file is present in which case it will be called "_copy"
- * @param {bool} [overwrite=false] Whether to overwrite the file.
- *
- * @return: { bool } The result of the copy.
- */
-$.oFile.prototype.copy = function( destfolder, copyName, overwrite){
- if (typeof overwrite === 'undefined') var overwrite = false;
- if (typeof copyName === 'undefined') var copyName = this.name;
- if (typeof destfolder === 'undefined') var destfolder = this.folder.path;
-
- var _fileName = this.fullName;
- if(destfolder instanceof this.$.oFolder) destfolder = destfolder.path;
-
- // remove extension from name in case user added it to the param
-  copyName = copyName.replace("."+this.extension, "");
- if (this.name == copyName && destfolder == this.folder.path) copyName += "_copy";
-
- var _fileName = copyName+((this.extension.length>0)?"."+this.extension:"");
-
- var _file = new PermanentFile(this.path);
- var _dest = new PermanentFile(destfolder+"/"+_fileName);
-
- if (_dest.exists() && !overwrite){
- throw new Error("Destination file "+destfolder+"/"+_fileName+" exists and will not be overwritten. Can't copy file.", this.DEBUG_LEVEL.ERROR);
- }
-
- this.$.debug("copying "+_file.path()+" to "+_dest.path(), this.$.DEBUG_LEVEL.LOG)
-
- var success = _file.copy(_dest);
-  if (!success) throw new Error("Copy of file "+_file.path()+" to location "+_dest.path()+" has failed.");
-
- return new this.$.oFile(_dest.path());
-}
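// A hedged sketch chaining copy(), rename() and moveToFolder(); all paths are hypothetical
// and the source file is assumed to exist.
var render = new $.oFile("C:/renders/shot_010.png");
var backup = render.copy("C:/renders/backup");                    // the copy keeps the name "shot_010"
backup.rename("shot_010_backup", true);                           // rename without the extension, overwrite if needed
render.moveToFolder(new $.oFolder("C:/renders/approved"), false); // move the original, don't overwrite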
-
-
-/**
- * Removes the file.
- * @return: { bool } The result of the removal.
- */
-$.oFile.prototype.remove = function(){
- var _file = new PermanentFile(this.path)
- if (_file.exists()) return _file.remove()
-}
-
-
-
-/**
- * Parses the file as a XML and returns an object containing the values.
- * @example
- * // parses the xml file as an object with imbricated hierarchy.
- * // each xml node is represented by a simple object with a "children" property containing the children nodes,
- * // and a objectName property representing the name of the node.
- * // If the node has attributes, those are set as properties on the object. All values are set as strings.
- *
- * // example: parsing the shortcuts file
- *
- * var shortcutsFile = (new $.oFile(specialFolders.userConfig+"/shortcuts.xml")).parseAsXml();
- *
- * // The returned object will always be a simple document object with a single "children" property containing the document nodes.
- *
- * var shortcuts = shortcutsFile.children[0].children // children[0] is the "shortcuts" parent node, we want the nodes contained within
- *
- * for (var i in shortcuts){
- * log (shortcuts[i].id)
- * }
- */
-$.oFile.prototype.parseAsXml = function(){
- if (this.extension.toLowerCase() != "xml") return
-
- // build an object model representation of the contents of the XML by parsing it character by character
- var xml = this.read();
- var xmlDocument = new this.$.oXml(xml);
- return xmlDocument;
-}
-
-
- /**
- * Used in converting the file to a string value, provides the string-path.
- * @return {string} The file path's as a string.
- */
-$.oFile.prototype.toString = function(){
- return this.path;
-}
-
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oXml class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The constructor for the $.oXml class.
- * @classdesc
- * The $.oXml class can be used to create an object from a xml string. It will contain a "children" property which is an array that holds all the children node from the main document.
- * @constructor
- * @param {string} xmlString the string to parse for xml content
- * @param {string} objectName "xmlDocument" for the top node, otherwise, the string description of the xml node (ex:
- * It can have any starting index and so can implement lists with a first index of 1 like the $.oColumn.frames returned value.
- * @param {object[]} initArray An array to initialize the list.
- * @param {int} [startIndex=0] The first index exposed in the list.
- * @param {int} [length=0] The length of the list -- the max between this value and the initial array's length is used.
- * @param {function} [getFunction=null] The function used to initialize the list when accessing an uninitiated element in the list. In form function( listItem, index ){ return value; }
- * @param {function} [setFunction=null] The function run when setting an entry in the list. In form function( listItem, index, value ){ return resolvedValue; } -- must return a resolved value.
- * @param {function} [sizeFunction=null] The function run when resizing the list. In form function( listItem, length ){ }
- */
-$.oList = function( initArray, startIndex, length, getFunction, setFunction, sizeFunction ){
- if(typeof initArray == 'undefined') var initArray = [];
- if(typeof startIndex == 'undefined') var startIndex = 0;
- if(typeof getFunction == 'undefined') var getFunction = false;
- if(typeof setFunction == 'undefined') var setFunction = false;
- if(typeof sizeFunction == 'undefined') var sizeFunction = false;
- if(typeof length == 'undefined') var length = 0;
-
- //Extend the cache if the content has been provided initially.
- //Must be not enumerable. . .
- // this._initArray = initArray;
- // this._cache = [];
-
- // this._getFunction = getFunction;
- // this._setFunction = setFunction;
- // this._sizeFunction = sizeFunction;
-
- // this.startIndex = startIndex;
- // this._length = Math.max( initArray.length, startIndex+length );
- // this.currentIndex = startIndex;
-
- Object.defineProperty( this, '_initArray', {
- enumerable : false, writable : true,
- value: initArray
- });
-
- Object.defineProperty( this, '_cache', {
- enumerable : false, writable : true,
- value: []
- });
-
- Object.defineProperty( this, '_getFunction', {
- enumerable : false, writable : true, configurable: false,
- value: getFunction
- });
-
- Object.defineProperty( this, '_setFunction', {
- enumerable : false, writable : true, configurable: false,
- value: setFunction
- });
-
- Object.defineProperty( this, '_sizeFunction', {
- enumerable : false, writable : true, configurable: false,
- value: sizeFunction
- });
-
- Object.defineProperty( this, 'currentIndex', {
- enumerable : false, writable : true, configurable: false,
- value: startIndex
- });
-
- Object.defineProperty( this, '_startIndex', {
- enumerable : false, writable : true, configurable: false,
- value: startIndex
- });
-
- Object.defineProperty( this, '_length', {
- enumerable : false, writable : true, configurable: false,
- value: Math.max( initArray.length, startIndex+length )
- });
-
- this.createGettersSetters();
-}
-
-
-Object.defineProperty( $.oList.prototype, '_type', {
- enumerable : false, writable : false, configurable: false,
- value: 'dynList'
-});
-
-
-/**
- * The next item in the list, undefined if reaching the end of the list.
- * @name $.oList#createGettersSetters
- * @private
- */
-Object.defineProperty($.oList.prototype, 'createGettersSetters', {
- enumerable : false,
- value: function(){
- {
- //Dynamic getter/setters.
- var func_get = function( listItem, index ){
-        if( index >= listItem._length ) return null;
- if( listItem._cache[index].cacheAvailable ){
- return listItem._cache[index].value;
- }
- if( listItem._getFunction ){
- listItem._cache[index].cacheAvailable = true;
- listItem._cache[index].value = listItem._getFunction( listItem, index );
- return listItem._cache[ index ].value;
- }
- return null;
- };
-
- //Either set the cache function directly, or run the setFunction to get a value and set it.
- var func_set = function( listItem, index, value ){
-        if( index >= listItem._length ){
-          if( listItem._sizeFunction ){
-            listItem.length = index+1;
-          }else{
-            throw new ReferenceError( 'Index out of range: '+index+ ' out of ' + listItem._length )
- }
- }
-
- if( listItem._setFunction ){
- listItem._cache[index].cacheAvailable = true;
- try{
- listItem._cache[index].value = listItem._setFunction( listItem, index, value );
- }catch(err){}
- }else{
- listItem._cache[index].cacheAvailable = true;
- listItem._cache[index].value = value;
- }
- };
-
- var setup_length = Math.max( this._length, this._cache.length );
- if( this._cache.length < setup_length ){
- this._cache = this._cache.concat( new Array( setup_length-this._cache.length ) );
- }
-
- for( var n=0;n
 Given a node as a source, provides the metadata associated with that node,
- * otherwise provides metadata for the scene.
- * @param {$.oNode} source A node as the source of the metadata-- otherwise provides the scene metadata.
- * @todo Need to extend this to allow node metadata.
- * @constructor
- * @example
- * var metadata = $.scene.getMetadata();
- * metadata.create( "mySceneMetadataName", {"ref":"thisReferenceValue"} );
- * metadata["mySceneMetadataName"]; //Provides: {"ref":"thisReferenceValue"}
- */
-$.oMetadata = function( source ){
- this._type = "metadata";
- if( !source ){ source = 'scene'; }
- this.source = source;
-
- this._metadatas = {};
-
- this.refresh();
-}
-
-
-/**
- * Refreshes the metadata by re-reading it from its source (the scene or a node) and ingesting the values appropriately. They are then available as properties of this class.
- * @name $.oMetadata#refresh
- * @function
- */
-$.oMetadata.prototype.refresh = function(){
-
- //----------------------------
- //GETTER/SETTERS
- var set_val = function( meta, name, val ){
- var metadata = meta._metadatas[ name ];
-
- var valtype = false;
- var jsonify = false;
- switch( typeof val ){
- case 'string':
- valtype = 'string';
- break;
- case 'number':
- if( val%1.0==0.0 ){
- valtype = 'int';
- }else{
- valtype = 'double';
- }
- break
- case 'boolean':
- case 'undefined':
- case 'null':
- valtype = 'bool';
- break
- case 'object':
- default:
- valtype = 'string';
- jsonify = true;
- break
- }
-
- if(jsonify){
- val = 'json('+JSON.stringify( val )+')';
- }
-
- if( meta.source == "scene" ){
- var type = false;
- scene.setMetadata( {
- "name" : name,
- "type" : valtype,
- "creator" : "OpenHarmony",
- "version" : "1.0",
- "value" : val
- }
- );
- }else{
-        var metaAttr = meta.source.attributes["meta"];
- if( metaAttr ){
- metaAttr[ name ] = val;
- }
- }
-
- meta.refresh();
- }
-
- var get_val = function( meta, name ){
- return meta._metadatas[name].value;
- }
-
- //Definition of properties.
- var getterSetter_create = function( targ, id, type, value ){
-
- if( type == "string" ){
- if( value.slice( 0, 5 ) == "json(" ){
- var obj = value.slice( 5, value.length-1 );
- value = JSON.parse( obj );
- }
- }
- targ._metadatas[ id ] = { "value": value, "type":type };
-
- //Create a getter/setter for it!
- Object.defineProperty( targ, id, {
- enumerable : true,
- configurable: true,
- set : eval( 'val = function(val){ set_val( targ, "'+id+'", val ); }' ),
- get : eval( 'val = function(){ return get_val( targ, "'+id+'"); }' )
- });
- }
-
-
- //Clear this objects previous getter/setters to make room for new ones.
- if( this._metadatas ){
- for( n in this._metadatas ){ //Remove them if they've disappeared.
- Object.defineProperty( this, n, {
- enumerable : false,
- configurable: true,
- set : function(){},
- get : function(){}
- });
- }
- }
- this._metadatas = {};
-
- if( this.source == "scene" ){
- var metadatas = scene.metadatas();
-
- for( var n=0;n
 Removes the metadata entry from the source that this metadata object references.
- * @name $.oMetadata#remove
- * @param {string} name The name of the metadata to remove.
- */
-$.oMetadata.prototype.remove = function( name ){
- var name = name.toLowerCase();
- if( !this.hasOwnProperty( name ) ){ return true; }
-
- var res = false;
- if( this.source == "scene" ){
-    if( scene.removeMetadata ){
-      res = scene.removeMetadata( scene.metadata(name), this._metadatas[ name ].type );
-    }else{
-      throw new ReferenceError( "scene.removeMetadata is supposed to exist, but doesn't seem to be available." );
-    }
- }else{
- res = this.source.removeAttribute( "meta."+name );
- }
-
- this.refresh();
- return res;
-}
\ No newline at end of file
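// A hedged round-trip sketch based on the constructor example above; the metadata key is hypothetical.
var metadata = $.scene.getMetadata();
metadata.create("mySceneMetadataName", {"ref": "thisReferenceValue"});
$.log(JSON.stringify(metadata["mySceneMetadataName"])); // {"ref":"thisReferenceValue"}
metadata.remove("mySceneMetadataName");                 // removes the entry from the scene again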
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_misc.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_misc.js
deleted file mode 100644
index 6ef75f5560..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_misc.js
+++ /dev/null
@@ -1,122 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library v0.01
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney...
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
- // Toonboom API, which must remain available.
-//
-// This library is made available under the MIT license.
-// https://opensource.org/licenses/mit
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-
-//TODO : view.currentToolManager integration.
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oUtils class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * The $.oUtils helper class -- providing generic utilities. Doesn't need instantiation.
- * @classdesc $.oUtils utility Class
- */
-$.oUtils = function(){
- this._type = "utils";
-}
-
-/**
- * Finds the longest common substring between two strings.
- * @param {string} str1
- * @param {string} str2
- * @returns {object} An object with "length", "sequence" and "offset" properties describing the longest common substring.
- */
-$.oUtils.longestCommonSubstring = function( str1, str2 ){
- if (!str1 || !str2)
- return {
- length: 0,
- sequence: "",
- offset: 0
- };
-
- var sequence = "",
- str1Length = str1.length,
- str2Length = str2.length,
- num = new Array(str1Length),
- maxlen = 0,
- lastSubsBegin = 0;
-
- for (var i = 0; i < str1Length; i++) {
- var subArray = new Array(str2Length);
- for (var j = 0; j < str2Length; j++)
- subArray[j] = 0;
- num[i] = subArray;
- }
- var subsBegin = null;
- for (var i = 0; i < str1Length; i++){
- for (var j = 0; j < str2Length; j++){
- if (str1[i] !== str2[j]){
- num[i][j] = 0;
- }else{
- if ((i === 0) || (j === 0)){
- num[i][j] = 1;
- }else{
- num[i][j] = 1 + num[i - 1][j - 1];
- }
- if (num[i][j] > maxlen){
- maxlen = num[i][j];
- subsBegin = i - num[i][j] + 1;
- if (lastSubsBegin === subsBegin){//if the current LCS is the same as the last time this block ran
- sequence += str1[i];
- }else{
- //this block resets the string builder if a different LCS is found
- lastSubsBegin = subsBegin;
- sequence= ""; //clear it
- sequence += str1.substr(lastSubsBegin, (i + 1) - lastSubsBegin);
- }
- }
- }
- }
- }
- return {
- length: maxlen,
- sequence: sequence,
- offset: subsBegin
- };
-}
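// Usage sketch for $.oUtils.longestCommonSubstring(): it returns an object, not a plain string
// (see the corrected @returns above). The input strings are hypothetical.
var result = $.oUtils.longestCommonSubstring("background_01", "background_02");
$.log(result.sequence); // "background_0"
$.log(result.length);   // 12
$.log(result.offset);   // 0 (start index of the match in the first string)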
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_network.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_network.js
deleted file mode 100644
index 2a6aa3519a..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_network.js
+++ /dev/null
@@ -1,424 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library v0.01
-//
-//
-// Developed by Mathieu Chaptel, Chris Fourney...
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-// empower users and be easy on newcomers, with default parameters values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-// function has been implemented or is guaranteed to work. Feel free to contribute
-// improvements to its official github. If you do make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
- // Toonboom API, which must remain available.
-//
-// This library is made available under the MIT license.
-// https://opensource.org/licenses/mit
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oNetwork methods //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * Network helper for HTTP methods. Available under $.network.
- * @constructor
- * @classdesc Network Helper Class
- * @param {dom} $ The connection back to the DOM.
- *
- */
-$.oNetwork = function( ){
- //Expect a path for CURL.
- var avail_paths = [
- "c:\\Windows\\System32\\curl.exe"
- ];
- if( !about.isWindowsArch() ){
- avail_paths = [
- "/usr/bin/curl",
- "/usr/local/bin/curl"
- ];
- }
-
- var curl_path = false;
- for( var n=0;n
Note, only implemented with useCurl=true.
- * @param {string} address The address for the file to be downloaded.
- * @param {string} path The local file path to save the download to.
- * @param {bool} replace Replace the file if it exists.
- *
- * @return: {bool} Whether the downloaded file exists at the destination path -- false when an error occurred.
- */
-$.oNetwork.prototype.downloadSingle = function ( address, path, replace ){
- if (typeof replace === 'undefined') var replace = false;
-
- try{
- if( this.useCurl && this.curlPath ){
- var file = new this.$.oFile( path );
- if( file.exists ){
- if( replace ){
- file.remove();
- }else{
- this.$.debug( "File already exists- unable to replace: " + path, this.$.DEBUG_LEVEL["ERROR"] );
- return false;
- }
- }
-
- var cmdline = [ "-L", "-o", path, address ];
-
- var p = new QProcess();
- p.start( this.curlPath, cmdline );
- p.waitForFinished( 10000 );
-
- var file = new this.$.oFile( path );
- return file.exists;
-
- }else{
- this.$.debug( "Downloads without curl are not implemented.", this.$.DEBUG_LEVEL["ERROR"] );
- return false;
- }
- }catch( err ){
- this.$.debug( err + " ("+err.lineNumber+")", this.$.DEBUG_LEVEL["ERROR"] );
- return false;
- }
-}
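// A hedged usage sketch, assuming the library exposes an instance as $.network (per the class
// description above) and that curl was found (useCurl is true). URL and destination are hypothetical.
var saved = $.network.downloadSingle(
  "https://example.com/banner.png",
  specialFolders.temp + "/banner.png",
  true // replace the file if it already exists
);
if (!saved) $.log("download failed");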
-
-
-/**
- * Threads multiple downloads at a time (10 concurrent). Downloads files from the internet at the given addresses. Note, only implemented with useCurl=true.
- * @param {object[]} address_path The download instructions, in the format [ { "path": localPathOnDisk, "url": "DownloadPath" } ]
- * @param {bool} replace Replace the file if it exists.
- *
- * @return: {bool[]} The result of the download for each file in the instructions.
- */
-$.oNetwork.prototype.downloadMulti = function ( address_path, replace ){
- if (typeof replace === 'undefined') var replace = false;
-
- var progress = new QProgressDialog();
- progress.setLabelText( "Downloading files..." );
- progress.show();
- progress.setRange( 0, address_path.length );
-
- var complete_process = function( val ){
- }
-
- var dload_cnt = 0;
- try{
- if( this.useCurl && this.curlPath ){
- var in_proc = [];
- var skipped = [];
- for( var x=0;x
- * It uses a cache system, so a node for a given path will only be created once.
- * If the nodes change path through other means than the openHarmony functions during the execution of the script, use oNode.invalidateCache() to create new nodes again.
- * This constructor should not be invoked by users, who should use $.scene.getNodeByPath() or $.scene.root.getNodeByName() instead.
- * @constructor
- * @param {string} path Path to the node in the network.
- * @param {$.oScene} [oSceneObject] Access to the oScene object of the DOM.
- * @see NodeType
- * @example
- * // To grab a node object from the scene, it's possible to create a new node object by calling the constructor:
- * var myNode = new $.oNode("Top/Drawing", $.scn)
- *
- * // However, most nodes will be grabbed directly from the scene object.
- * var doc = $.scn
- * var nodes = doc.nodes; // grabs the list of all the nodes in the scene
- *
- * // It's possible to grab a single node from the path in the scene
- * var myNode = doc.getNodeByPath("Top/Drawing")
- * var myNode = doc.$node("Top/Drawing") // short synthax but same function
- *
- * // depending on the type of node, oNode objects returned by these functions can actually be an instance the subclasses
- * // oDrawingNode, oGroupNode, oPegNode...
- *
- * $.log(myNode instanceof $.oNode) // true
- * $.log(myNode instanceof $.oDrawingNode) // true
- *
- * // These other subclasses of nodes have other methods that are only shared by nodes of a certain type.
- *
- * // Not documented in this class, oNode objects have attributes which correspond to the values visible in the Layer Properties window.
- * // The attributes values can be accessed and set by using the dot notation on the oNode object:
- *
- * myNode.can_animate = false;
- * myNode.position.separate = true;
- * myNode.position.x = 10;
- *
- * // To access the oAttribute objects in the node, call the oNode.attributes object that contains them
- *
- * var attributes = myNode.attributes;
- */
-$.oNode = function( path, oSceneObject ){
- var instance = this.$.getInstanceFromCache.call(this, path);
- if (instance) return instance;
-
- this._path = path;
- this.type = node.type(this.path);
- this.scene = (typeof oSceneObject === 'undefined')?this.$.scene:oSceneObject;
-
- this._type = 'node';
-
- this.refreshAttributes();
-}
-
-/**
- * Initialize the attribute cache.
- * @private
- */
-$.oNode.prototype.attributesBuildCache = function (){
- //Cache time can be used at later times, to check for auto-rebuild of caches. Not yet implemented.
- this._cacheTime = (new Date()).getTime();
-
- var _attributesList = node.getAttrList( this.path, 1 );
- var _attributes = {};
-
- for (var i in _attributesList){
-
- var _attribute = new this.$.oAttribute(this, _attributesList[i]);
- var _keyword = _attribute.keyword;
-
- _attributes[_keyword] = _attribute;
- }
-
- this._attributes_cached = _attributes;
-}
-
-
-/**
- * Private function to create attributes setters and getters as properties of the node
- * @private
- */
-$.oNode.prototype.setAttrGetterSetter = function (attr, context){
- if (typeof context === 'undefined') context = this;
- // this.$.debug("Setting getter setters for attribute: "+attr.keyword+" of node: "+this.name, this.$.DEBUG_LEVEL.DEBUG)
-
- var _keyword = attr.shortKeyword;
-
- Object.defineProperty( context, _keyword, {
- enumerable : true,
- configurable : true,
- get : function(){
- // MessageLog.trace("getting attribute "+attr.keyword+". animated: "+(attr.column != null))
- var _subAttrs = attr.subAttributes;
- if (_subAttrs.length == 0){
- // if attribute has animation, return the frames
- if (attr.column != null) return attr.frames;
- // otherwise return the value
- var _value = attr.getValue();
- }else{
- // if there are subattributes, create getter setters for each on the returned object
- // this means every result of attr.getValue must be an object.
- // For attributes that have a string return value, attr.getValue() actually returns a fake string object
- // which is an object with a value property and a toString() method returning the value.
- var _value = (attr.column != null)?new this.$.oList(attr.frames, 1):attr.getValue();
- for (var i in _subAttrs){
- this.setAttrGetterSetter( _subAttrs[i], _value );
- }
- }
- return _value;
- },
-
- set : function(newValue){
- // this.$.debug("setting attribute through getter setter "+attr.keyword+" to value: "+newValue, this.$.DEBUG_LEVEL.DEBUG)
- // if attribute has animation, passed value must be a frame object
- var _subAttrs = attr.subAttributes;
-
- // setting the attribute directly if no subattributes are present, or if value is a color (exception)
- if (_subAttrs.length == 0 || attr.type == "COLOR"){
- if (attr.column != null) {
- if (!newValue.hasOwnProperty("frameNumber")) {
- // fallback to set frame 1
- newValue = {value:newValue, frameNumber:1};
- }
- attr.setValue(newValue.value, newValue.frameNumber)
- }else{
- return attr.setValue(newValue)
- }
- }else{
- var _frame = undefined;
- var _value = newValue;
- // dealing with value being an object with frameNumber for animated values
- if (attr.column != null) {
-        if (!(newValue instanceof this.$.oFrame)) {
- // fallback to set frame 1
- newValue = {value:newValue, frameNumber:1};
- }
-
- _frame = newValue.frameNumber;
- _value = newValue.value;
- }
-
- // setting non animated attribute value
- for (var i in _subAttrs){
- // set each subAttr individually based on corresponding values in the provided object
- var _keyword = _subAttrs[i].shortKeyword;
- if (_value.hasOwnProperty(_keyword)) _subAttrs[i].setValue(_value[_keyword], _frame);
- }
- }
- }
- });
-};
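// A hedged sketch of what the generated getter/setters allow, mirroring the branches above
// ("Top/Drawing" is a hypothetical node path):
var myNode = $.scn.$node("Top/Drawing");
myNode.can_animate = false;                        // plain attribute: set the value directly
myNode.position.separate = true;                   // sub-attribute exposed on the returned object
myNode.position.x = 10;                            // on an animated attribute a plain value sets frame 1 (fallback)
myNode.position.x = {frameNumber: 12, value: 25};  // on an animated attribute, set an explicit frame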
-
-
-/**
- * The derived path to the node.
- * @deprecated use oNode.path instead
- * @name $.oNode#fullPath
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oNode.prototype, 'fullPath', {
- get : function( ){
- return this._path;
- }
-});
-
-
-/**
- * The path of the node (includes all groups from 'Top' separated by forward slashes).
- * To change the path of a node, use oNode.moveToGroup()
- * @name $.oNode#path
- * @type {string}
- * @readonly
- */
-Object.defineProperty($.oNode.prototype, 'path', {
- get : function( ){
- return this._path;
- }
-});
-
-
-/**
- * The type of the node.
- * @name $.oNode#type
- * @readonly
- * @type {string}
- */
-Object.defineProperty( $.oNode.prototype, 'type', {
- get : function( ){
- return node.type( this.path );
- }
-});
-
-
-/**
- * Is the node a group?
- * @name $.oNode#isGroup
- * @readonly
- * @deprecated check if the node is an instance of oGroupNode instead
- * @type {bool}
- */
-Object.defineProperty($.oNode.prototype, 'isGroup', {
- get : function( ){
- if( this.root ){
- //in a sense, its a group.
- return true;
- }
-
- return node.isGroup( this.path );
- }
-});
-
-
-/**
- * The $.oNode objects contained in this group. This is deprecated and was moved to oGroupNode
- * @deprecated Use oGroupNode.children instead.
- * @name $.oNode#children
- * @readonly
- * @type {$.oNode[]}
- */
-Object.defineProperty($.oNode.prototype, 'children', {
- get : function( ){
- if( !this.isGroup ){ return []; }
-
- var _children = [];
- var _subnodes = node.subNodes( this.path );
- for( var n=0; n<_subnodes.length; n++ ){
- _children.push( this.scene.getNodeByPath( _subnodes[n] ) );
- }
-
- return _children;
- },
-
- set : function( arr_children ){
- //Consider a way to have this group adopt the children, move content here?
- //this may be a bit tough to extend.
- }
-});
-
-
-/**
- * Does the node exist?
- * @name $.oNode#exists
- * @type {bool}
- * @readonly
- */
-Object.defineProperty($.oNode.prototype, 'exists', {
- get : function(){
- if( this.type ){
- return true;
- }else{
- return false;
- }
- }
-});
-
-
-/**
- * Is the node selected?
- * @name $.oNode#selected
- * @type {bool}
- */
-Object.defineProperty($.oNode.prototype, 'selected', {
- get : function(){
- for( var n=0;n
- * It represents 'read' nodes or Drawing nodes in the scene.
- * @constructor
- * @augments $.oNode
- * @param {string} path Path to the node in the network.
- * @param {$.oScene} oSceneObject Access to the oScene object of the DOM.
- * @example
- * // Drawing Nodes are more than a node, as they do not work without an associated Drawing column and element.
- * // adding a drawing node will automatically create a column and an element, unless they are provided as arguments.
- * // Creating an element makes importing a drawing file possible.
- *
- * var doc = $.scn;
- *
- * var drawingName = "myDrawing";
- * var myElement = doc.addElement(drawingName, "TVG"); // add an element that holds TVG(Toonboom Vector Drawing) files
- * var myDrawingColumn = doc.addColumn("DRAWING", drawingName, myElement); // create a column and link the element created to it
- *
- * var sceneRoot = doc.root; // grab the scene root group
- *
- * // Creating the Drawing node and linking the previously created element and column
- * var myDrawingNode = sceneRoot.addDrawingNode(drawingName, new $.oPoint(), myDrawingColumn, myElement);
- *
- * // This also works:
- *
- * var myOtherNode = sceneRoot.addDrawingNode("Drawing2");
- */
-$.oDrawingNode = function(path, oSceneObject) {
- // $.oDrawingNode can only represent a node of type 'READ'
- if (node.type(path) != 'READ') throw "'path' parameter must point to a 'READ' type node";
- var instance = this.$.oNode.call(this, path, oSceneObject);
- if (instance) return instance;
-
- this._type = 'drawingNode';
-}
-$.oDrawingNode.prototype = Object.create($.oNode.prototype);
-$.oDrawingNode.prototype.constructor = $.oDrawingNode;
-
-
-/**
- * The element that holds the drawings displayed by the node.
- * @name $.oDrawingNode#element
- * @type {$.oElement}
- */
-Object.defineProperty($.oDrawingNode.prototype, "element", {
- get : function(){
- var _column = this.attributes.drawing.element.column;
- return ( new this.$.oElement( node.getElementId(this.path), _column ) );
- },
-
- set : function( oElementObject ){
- var _column = this.attributes.drawing.element.column;
- column.setElementIdOfDrawing( _column.uniqueName, oElementObject.id );
- }
-});
-
-
-/**
- * The column that holds the drawings displayed by the node.
- * @name $.oDrawingNode#timingColumn
- * @type {$.oDrawingColumn}
- */
-Object.defineProperty($.oDrawingNode.prototype, "timingColumn", {
- get : function(){
- var _column = this.attributes.drawing.element.column;
- return _column;
- },
-
- set : function (oColumnObject){
- var _attribute = this.attributes.drawing.element;
- _attribute.column = oColumnObject;
- }
-});
-
-
-/**
- * An array of the colorIds contained within the drawings displayed by the node.
- * @name $.oDrawingNode#usedColorIds
- * @type {int[]}
- */
-Object.defineProperty($.oDrawingNode.prototype, "usedColorIds", {
- get : function(){
- // this.$.log("used colors in node : "+this.name)
- var _drawings = this.element.drawings;
- var _colors = [];
-
- for (var i in _drawings){
- var _drawingColors = _drawings[i].usedColorIds;
- for (var c in _drawingColors){
- if (_colors.indexOf(_drawingColors[c]) == -1) _colors.push(_drawingColors[c]);
- }
- }
-
- return _colors;
- }
-});
-
-
-/**
- * An array of the colors contained within the drawings displayed by the node, found in the palettes.
- * @name $.oDrawingNode#usedColors
- * @type {$.oColor[]}
- */
-Object.defineProperty($.oDrawingNode.prototype, "usedColors", {
- get : function(){
- // get unique Color Ids
- var _ids = this.usedColorIds;
-
- // look in both element and scene palettes
- var _palettes = this.palettes.concat(this.$.scn.palettes);
-
- // build a palette/id list to speedup massive palettes/palette lists
- var _colorIds = {}
- for (var i in _palettes){
- var _palette = _palettes[i];
- var _colors = _palette.colors;
- _colorIds[_palette.name] = {};
- for (var j in _colors){
- _colorIds[_palette.name][_colors[j].id] = _colors[j];
- }
- }
-
- // for each id on the drawing, identify the corresponding color
- var _usedColors = _ids.map(function(id){
- for (var paletteName in _colorIds){
- if (_colorIds[paletteName][id]) return _colorIds[paletteName][id];
- }
- throw new Error("Missing color found for id: "+id+". Color doesn't belong to any palette in the scene or element.");
- })
-
- return _usedColors;
- }
-})
-
-
-/**
- * The drawing.element keyframes.
- * @name $.oDrawingNode#timings
- * @type {$.oFrames[]}
- * @example
- * // The timings hold the keyframes that display the drawings across time.
- *
- * var timings = $.scn.$node("Top/Drawing").timings;
- * for (var i in timings){
- *   $.log( timings[i].frameNumber+" : "+timings[i].value); // outputs the frame and the value of each keyframe
- * }
- *
- * // timings are keyframe objects, so they are dynamic.
- * timings[2].value = "5"; // sets the displayed image of the second key to the drawing named "5"
- *
- * // to set a new value to a frame that wasn't a keyframe before, it's possible to use the attribute keyword like so:
- *
- * var myNode = $.scn.$node("Top/Drawing");
- * myNode.drawing.element = {frameNumber: 5, value: "10"} // setting the value of the frame 5
- * myNode.drawing.element = {frameNumber: 6, value: timings[1].value} // setting the value to the same as one of the timings
- */
-Object.defineProperty($.oDrawingNode.prototype, "timings", {
- get : function(){
- return this.attributes.drawing.element.getKeyframes();
- }
-})
-
-
-/**
- * The element palettes linked to the node.
- * @name $.oDrawingNode#palettes
- * @type {$.oPalette[]}
- */
-Object.defineProperty($.oDrawingNode.prototype, "palettes", {
- get : function(){
- var _element = this.element;
- return _element.palettes;
- }
-})
-
-
-// Class Methods
-
-/**
- * Gets the drawing name at the given frame.
- * @param {int} [frameNumber] The frame to look at. Defaults to the current frame.
- * @return {$.oDrawing}
- */
-$.oDrawingNode.prototype.getDrawingAtFrame = function(frameNumber){
-  if (typeof frameNumber === "undefined") var frameNumber = this.$.scene.currentFrame;
-
-  var _attribute = this.attributes.drawing.element;
-  return _attribute.getValue(frameNumber);
-}
-
-
- /**
- * Gets the list of palettes containing colors used by a drawing node. This only gets palettes with the first occurrence of the colors.
- * @return {$.oPalette[]} The palettes that contain the color IDs used by the drawings of the node.
- */
-$.oDrawingNode.prototype.getUsedPalettes = function(){
- var _palettes = {};
- var _usedPalettes = [];
-
- var _usedColors = this.usedColors;
- // build an object of palettes under ids as keys to remove duplicates
- for (var i in _usedColors){
- var _palette = _usedColors[i].palette;
- _palettes[_palette.id] = _palette;
- }
- for (var i in _palettes){
- _usedPalettes.push(_palettes[i]);
- }
-
- return _usedPalettes;
-}
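// Usage sketch: listing the palettes actually used by a drawing node
// ("Top/Drawing" is a hypothetical node path).
var drawingNode = $.scn.$node("Top/Drawing");
var usedPalettes = drawingNode.getUsedPalettes();
for (var i in usedPalettes){
  $.log(drawingNode.name + " uses palette: " + usedPalettes[i].name);
}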
-
-
-/**
- * Displays all the drawings from the node's element onto the timeline
- * @param {int} [framesPerDrawing=1] The number of frames each drawing will be shown for
- */
-$.oDrawingNode.prototype.exposeAllDrawings = function(framesPerDrawing){
- if (typeof framesPerDrawing === 'undefined') var framesPerDrawing = 1;
-
- var _drawings = this.element.drawings;
- var frameNumber = 1;
- for (var i=0; i < _drawings.length; i++){
- //log("showing drawing "+_drawings[i].name+" at frame "+i)
- this.showDrawingAtFrame(_drawings[i], frameNumber);
- frameNumber+=framesPerDrawing;
- }
-
- var _column = this.attributes.drawing.element.column;
- var _exposures = _column.getKeyframes();
- _column.extendExposures(_exposures, framesPerDrawing-1);
-}
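// Usage sketch: expose every drawing of the node's element on the timeline, two frames per drawing
// ("Top/Drawing" is a hypothetical node path).
var drawingNode = $.scn.$node("Top/Drawing");
drawingNode.exposeAllDrawings(2);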
-
-
-/**
- * Displays the given drawing at the given frame
- * @param {$.oDrawing} drawing
- * @param {int} frameNum
- */
-$.oDrawingNode.prototype.showDrawingAtFrame = function(drawing, frameNum){
- var _column = this.attributes.drawing.element.column;
- _column.setValue(drawing.name, frameNum);
-}
-
-
- /**
- * Links a palette to a drawing node as Element Palette.
- * @param {$.oPalette} oPaletteObject the palette to link to the node
- * @param {int} [index] The index of the list at which the palette should appear once linked
- *
- * @return {$.oPalette} The linked element Palette.
- */
-$.oDrawingNode.prototype.linkPalette = function(oPaletteObject, index){
- return this.element.linkPalette(oPaletteObject, index);
-}
-
-
- /**
- * Unlinks an Element Palette from a drawing node.
- * @param {$.oPalette} oPaletteObject the palette to unlink from the node
- *
- * @return {bool} The success of the unlink operation.
- */
-$.oDrawingNode.prototype.unlinkPalette = function(oPaletteObject){
- return this.element.unlinkPalette(oPaletteObject);
-}
-
-
-
-
- /**
- * Duplicates a node by creating an independent copy.
- * @param {string} [newName] The new name for the duplicated node.
- * @param {oPoint} [newPosition] The new position for the duplicated node.
- * @param {bool} [duplicateElement] Whether to also duplicate the element.
- */
-$.oDrawingNode.prototype.duplicate = function(newName, newPosition, duplicateElement){
- if (typeof newPosition === 'undefined') var newPosition = this.nodePosition;
- if (typeof newName === 'undefined') var newName = this.name+"_1";
- if (typeof duplicateElement === 'undefined') var duplicateElement = true;
-
- var _duplicateElement = duplicateElement?this.element.duplicate(this.name):this.element;
-
- var _duplicateNode = this.group.addDrawingNode(newName, newPosition, _duplicateElement);
- var _attributes = this.attributes;
-
- for (var i in _attributes){
- var _duplicateAttribute = _duplicateNode.getAttributeByName(_attributes[i].keyword);
- _duplicateAttribute.setToAttributeValue(_attributes[i], true);
- }
-
-
- return _duplicateNode;
-};
-
-
- /**
- * Updates the imported drawings in the node.
- * @param {$.oFile} sourcePath the oFile object pointing to the source to update from
- * @param {string} [drawingName] the drawing to import the updated bitmap into
- * @todo implement a memory of the source through metadata
- */
-$.oDrawingNode.prototype.update = function(sourcePath, drawingName){
- if (!this.element) return; // no element means nothing to update, import instead.
- if (typeof drawingName === 'undefined') var drawingName = this.element.drawings[0].name;
-
- var _drawing = this.element.getDrawingByName(drawingName);
-
- _drawing.importBitmap(sourcePath);
- _drawing.refreshPreview();
-}
-
-
- /**
- * Extracts the position information on a drawing node, and applies it to a new peg instead.
- * @return {$.oPegNode} The created peg.
- */
-$.oDrawingNode.prototype.extractPeg = function(){
- var _drawingNode = this;
- var _peg = this.group.addNode("PEG", this.name+"-P");
- var _columns = _drawingNode.linkedColumns;
-
- _peg.position.separate = _drawingNode.offset.separate;
- _peg.scale.separate = _drawingNode.scale.separate;
-
- // link each column that can be to the peg instead and reset the drawing node
- for (var i in _columns){
- var _attribute = _columns[i].attributeObject;
- var _keyword = _attribute._keyword;
-
- var _nodeAttribute = _drawingNode.getAttributeByName(_keyword);
-
- if (_keyword.indexOf("OFFSET") != -1) _keyword = _keyword.replace("OFFSET", "POSITION");
-
- var _pegAttribute = _peg.getAttributeByName(_keyword);
-
- if (_pegAttribute !== null){
- _pegAttribute.column = _columns[i];
- _nodeAttribute.column = null;
- _drawingNode[_keyword] = _attribute.defaultValue;
- }
- }
-
- _drawingNode.offset.separate = false; // doesn't work?
- _drawingNode.can_animate = false;
-
- _peg.centerAbove(_drawingNode, -1, -30)
- _drawingNode.linkInNode(_peg)
-
- return _peg;
-}
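// Usage sketch for extractPeg(): move the transformation of a drawing node onto a freshly created peg
// ("Top/Drawing" is a hypothetical node path).
var drawingNode = $.scn.$node("Top/Drawing");
var peg = drawingNode.extractPeg();
$.log("created peg: " + peg.path);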
-
-
- /**
- * Gets the contour curves of the drawing, as a concave hull.
- * @param {int} [count] The number of points on the contour curve to derive.
- * @param {int} [frame] The frame to derive the contours.
- *
- * @return {oPoint[][]} The contour curves.
- */
-$.oDrawingNode.prototype.getContourCurves = function( count, frame ){
-
- if (typeof frame === 'undefined') var frame = this.scene.currentFrame;
- if (typeof count === 'undefined') var count = 3;
-
- var res = EnvelopeCreator().getDrawingBezierPath( this.path,
- frame, //FRAME
- 2.5, //DISCRETIZER
- 0, //K
- count, //DESIRED POINT COUNT
- 0, //BLUR
- 0, //EXPAND
- false, //SINGLELINE
- true, //USE MIN POINTS,
- 0, //ADDITIONAL BISSECTING
-
- false
- );
- if( res.success ){
- var _curves = res.results.map(function(x){return [
- new this.$.oPoint( x[0][0], x[0][1], 0.0 ),
- new this.$.oPoint( x[1][0], x[1][1], 0.0 ),
- new this.$.oPoint( x[2][0], x[2][1], 0.0 ),
- new this.$.oPoint( x[3][0], x[3][1], 0.0 )
- ]; } );
- return _curves;
- }
-
- return [];
-}
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oTransformSwitchNode class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-/**
- * Constructor for the $.oTransformSwitchNode class
- * @classdesc
- * $.oTransformSwitchNode is a subclass of $.oNode and implements the same methods and properties as $.oNode.
- * It represents transform switches in the scene.
- * @constructor
- * @augments $.oNode
- * @param {string} path Path to the node in the network.
- * @param {oScene} oSceneObject Access to the oScene object of the DOM.
- * @property {$.oTransformNamesObject} names An array-like object with static indices (starting at 0) for each transformation name, which can be retrieved/set directly.
- * @example
- * // Assuming the existence of a Deformation group applied to a 'Drawing' node at the root of the scene
- * var myNode = $.scn.getNodeByPath("Top/Deformation-Drawing/Transformation-Switch");
- *
- * myNode.names[0] = "B"; // setting the value for the first transform drawing name to "B"
- *
- * var drawingNames = ["A", "B", "C"] // example of iterating over the existing names to set/retrieve them
- * for (var i in myNode.names){
- * $.log(i+": "+myNode.names[i]);
- * $.log(myNode.names[i] = drawingNames[i]);
- * }
- *
- * $.log("length: " + myNode.names.length) // the number of names
- * $.log("names: " + myNode.names) // prints the list of names
- * $.log("indexOf 'B': " + myNode.names.indexOf("B")) // can use methods from Array
- */
-$.oTransformSwitchNode = function( path, oSceneObject ) {
- if (node.type(path) != 'TransformationSwitch') throw "'path' parameter ("+path+") must point to a 'TransformationSwitch' type node. Got: "+node.type(path);
- var instance = this.$.oNode.call( this, path, oSceneObject );
- if (instance) return instance;
-
- this._type = 'transformSwitchNode';
- this.names = new this.$.oTransformNamesObject(this);
-}
-$.oTransformSwitchNode.prototype = Object.create( $.oNode.prototype );
-$.oTransformSwitchNode.prototype.constructor = $.oTransformSwitchNode;
-
-
-/**
- * Constructor for the $.oTransformNamesObject class
- * @classdesc
- * $.oTransformNamesObject is an array like object with static length that exposes getter setters for
- * each transformation name used by the oTransformSwitchNode. It can use the same methods as any array.
- * @constructor
- * @param {$.oTransformSwitchNode} transformSwitchNode the transform switch node instance using this object
- * @property {int} length the number of valid elements in the object.
- */
-$.oTransformNamesObject = function(transformSwitchNode){
- Object.defineProperty(this, "transformSwitchNode", {
- enumerable:false,
- get: function(){
- return transformSwitchNode;
- },
- })
-
- this.refresh();
-}
-$.oTransformNamesObject.prototype = Object.create(Array.prototype);
-
-
-/**
- * creates a $.oTransformSwitch.names property with an index for each name to get/set the name value
- * @private
- */
-Object.defineProperty($.oTransformNamesObject.prototype, "createGetterSetter", {
- enumerable:false,
- value: function(index){
- var attrName = "transformation_" + (index+1);
- var transformNode = this.transformSwitchNode;
-
- Object.defineProperty(this, index, {
- enumerable:true,
- configurable:true,
- get: function(){
- return transformNode.transformationnames[attrName];
- },
- set: function(newName){
- newName = newName+""; // convert to string
- this.$.debug("setting "+attrName+" to drawing "+newName+" on "+transformNode.path, this.$.DEBUG_LEVEL.DEBUG)
- if (newName instanceof this.$.oDrawing) newName = newName.name;
- transformNode.transformationnames[attrName] = newName;
- }
- })
- }
-})
-
-
-/**
- * The length of the array of names on the oTransformSwitchNode node. Corresponds to the transformationnames.size subAttribute.
- * @name $.oTransformNamesObject#length
- * @type {int}
- */
- Object.defineProperty($.oTransformNamesObject.prototype, "length", {
- enumerable:false,
- get: function(){
- return this.transformSwitchNode.transformationnames.size;
- },
-})
-
-
-/**
- * A string representation of the names list
- * @private
- */
-Object.defineProperty($.oTransformNamesObject.prototype, "toString", {
- enumerable:false,
- value: function(){
- return this.join(",");
- }
-})
-
-
-/**
- * @private
- */
-Object.defineProperty($.oTransformNamesObject.prototype, "refresh", {
- enumerable:false,
- value:function(){
- for (var i in this){
- delete this[i];
- }
- for (var i=0; i
- * It represents groups in the scene. From this class, it's possible to add nodes, and backdrops, import files and templates into the group.
- * @constructor
- * @augments $.oNode
- * @param {string} path Path to the node in the network.
- * @param {oScene} oSceneObject Access to the oScene object of the DOM.
- * @example
- * // to add a new node, grab the group it'll be created in first
- * var doc = $.scn
- * var sceneRoot = doc.root; // grab the scene root group
- *
- * var myGroup = sceneRoot.addGroup("myGroup", false, false); // create a group in the scene root, with a peg and composite but no nodes
- * var MPO = myGroup.multiportOut; // grab the multiport out of the group
- *
- * var myNode = myGroup.addDrawingNode("myDrawingNode"); // add a drawing node inside the group
- * myNode.linkOutNode(MPO); // link the newly created node to the multiport
- * myNode.centerAbove(MPO);
- *
- * var sceneComposite = doc.$node("Top/Composite"); // grab the scene composite node
- * myGroup.linkOutNode(sceneComposite); // link the group to it
- *
- * myGroup.centerAbove(sceneComposite);
- */
-$.oGroupNode = function(path, oSceneObject) {
-  // $.oGroupNode can only represent a node of type 'GROUP'
- if (node.type(path) != 'GROUP') throw "'path' parameter must point to a 'GROUP' type node";
- var instance = this.$.oNode.call(this, path, oSceneObject);
- if (instance) return instance;
-
- this._type = 'groupNode';
-}
-$.oGroupNode.prototype = Object.create($.oNode.prototype);
-$.oGroupNode.prototype.constructor = $.oGroupNode;
-
-/**
- * The multiport in node of the group. If one doesn't exist, it will be created.
- * @name $.oGroupNode#multiportIn
- * @readonly
- * @type {$.oNode}
- */
-Object.defineProperty($.oGroupNode.prototype, "multiportIn", {
- get : function(){
- if (this.isRoot) return null
- var _MPI = this.scene.getNodeByPath(node.getGroupInputModule(this.path, "Multiport-In", 0,-100,0),this.scene)
- return (_MPI)
- }
-})
-
-
-/**
- * The multiport out node of the group. If one doesn't exist, it will be created.
- * @name $.oGroupNode#multiportOut
- * @readonly
- * @type {$.oNode}
- */
-Object.defineProperty($.oGroupNode.prototype, "multiportOut", {
- get : function(){
- if (this.isRoot) return null
- var _MPO = this.scene.getNodeByPath(node.getGroupOutputModule(this.path, "Multiport-Out", 0, 100,0),this.scene)
- return (_MPO)
- }
-});
-
- /**
- * All the nodes contained within the group, one level deep.
- * @name $.oGroupNode#nodes
- * @readonly
- * @type {$.oNode[]}
- */
-Object.defineProperty($.oGroupNode.prototype, "nodes", {
- get : function() {
- var _path = this.path;
- var _nodes = node.subNodes(_path);
-
- var self = this;
- return _nodes.map(function(x){return self.scene.getNodeByPath(x)});
- }
-});
-
-
-
- /**
- * All the backdrops contained within the group.
- * @name $.oGroupNode#backdrops
- * @readonly
- * @type {$.oBackdrop[]}
- */
-Object.defineProperty($.oGroupNode.prototype, "backdrops", {
- get : function() {
-    var _path = this.path;
-    var _backdropObjects = Backdrop.backdrops(_path);
-    var self = this;
-    var _backdrops = _backdropObjects.map(function(x){return new self.$.oBackdrop(_path, x)});
-
- return _backdrops;
- }
-});
-
-
- /**
- * Returns a node from within a group based on its name.
- * @param {string} name The name of the node.
- *
- * @return {$.oNode} The node, or null if can't be found.
- */
-$.oGroupNode.prototype.getNodeByName = function(name){
- var _path = this.path+"/"+name;
-
- return this.scene.getNodeByPath(_path);
-}
-
-
- /**
- * Returns all the nodes of a certain type in the group.
- * Pass a value to recurse to look into the groups as well.
- * @param {string} typeName The type of the nodes.
- * @param {bool} recurse Whether to look inside the groups.
- *
- * @return {$.oNode[]} The nodes found.
- */
-$.oGroupNode.prototype.getNodesByType = function(typeName, recurse){
- if (typeof recurse === 'undefined') var recurse = false;
- return this.subNodes(recurse).filter(function(x){return x.type == typeName});
-}
-
-
- /**
- * Returns a child node in a group based on a search.
- * @param {string} name The name of the node.
- *
- * @return {$.oNode} The node, or null if can't be found.
- */
-$.oGroupNode.prototype.$node = function(name){
- return this.getNodeByName(name);
-}
-
-
- /**
- * Gets all the nodes contained within the group.
- * @param {bool} [recurse=false] Whether to recurse the groups within the groups.
- *
- * @return {$.oNode[]} The nodes in the group
- */
-$.oGroupNode.prototype.subNodes = function(recurse){
- if (typeof recurse === 'undefined') recurse = false;
-
- var _nodes = node.subNodes(this.path);
- var _subNodes = [];
-
- for (var i in _nodes){
- var _oNodeObject = this.scene.getNodeByPath(_nodes[i]);
- _subNodes.push(_oNodeObject);
- if (recurse && node.isGroup(_nodes[i])) _subNodes = _subNodes.concat(_oNodeObject.subNodes(recurse));
- }
-
- return _subNodes;
-}
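// Usage sketch: listing nodes in a group, optionally recursing into nested groups.
var root = $.scn.root;
var allNodes = root.subNodes(true);              // every node in the scene
var allPegs = root.getNodesByType("PEG", true);  // every peg, at any depth
$.log(allNodes.length + " nodes, " + allPegs.length + " pegs");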
-
-
- /**
- * Gets all children of the group.
- * @param {bool} [recurse=false] Whether to recurse the groups within the groups.
- *
- * @return {$.oNode[]} The nodes in the group
- */
-$.oGroupNode.prototype.children = function(recurse){
- return this.subNodes(recurse);
-}
-
-
-
- /**
 * Creates an in-port on top of a group.
 * @param {int} [portNum] The port number where the port will be added. Defaults to the end of the existing ports.
 * @param {string} [type] Pass "transform" to create a transformation port; otherwise a regular port is created.
 *
 * @return {int} The number of the port that was actually created (clamped when the requested port number was larger than the current number of ports).
- */
-$.oGroupNode.prototype.addInPort = function(portNum, type){
- var _inPorts = this.inPorts;
-
- if (typeof portNum === 'undefined') var portNum = _inPorts;
- if (portNum > _inPorts) portNum = _inPorts;
-
- var _type = (type=="transform")?"READ":"none"
- var _dummyNode = this.addNode(_type, "dummy_add_port_node");
- var _MPI = this.multiportIn;
- _dummyNode.linkInNode(_MPI, 0, portNum, true);
- _dummyNode.unlinkInNode(_MPI);
- _dummyNode.remove();
-
- return portNum;
-}
-
-
- /**
- * Creates an out-port at the bottom of a group. For some reason groups can have many unconnected in-ports but only one unconnected out-port.
- * @param {int} [portNum] The port number where a port will be added
- * @param {string} [type] The type of port to create (pass "transform" for a transform port).
- *
- * @return {int} The index of the created port, which may differ from the requested portNum if that value was invalid (for example, larger than the current number of ports + 1).
- */
-$.oGroupNode.prototype.addOutPort = function(portNum, type){
- var _outPorts = this.outPorts;
-
- if (typeof portNum === 'undefined') var portNum = _outPorts;
- if (portNum > _outPorts) portNum = _outPorts;
-
- var _type = (type=="transform")?"PEG":"none"
- var _dummyNode = this.addNode(_type, "dummy_add_port_node");
- var _MPO = this.multiportOut;
-
- _dummyNode.linkOutNode(_MPO, 0, portNum, true);
- _dummyNode.unlinkOutNode(_MPO);
- _dummyNode.remove();
-
- return portNum;
-}
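As a hedged sketch of how the two port helpers above might be used, assuming a group node already exists at "Top/Group" in the current scene:

    var group = $.scn.getNodeByPath("Top/Group");
    var newIn = group.addInPort(group.inPorts, "transform");  // append a transform in-port at the top
    var newOut = group.addOutPort(group.outPorts);            // append an out-port at the bottom
    $.log("created in-port " + newIn + " and out-port " + newOut);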
-
-
-
- /**
- * Sorts out the node view inside the group
- * @param {bool} [recurse=false] Whether to recurse the groups within the groups.
- */
-$.oGroupNode.prototype.orderNodeView = function(recurse){
- if (typeof recurse === 'undefined') var recurse = false;
-
- TB_orderNetworkUpBatchFromList( node.subNodes(this.path) );
-
- if (!this.isRoot){
- var _MPO = this.multiportOut;
- var _MPI = this.multiportIn;
-
- _MPI.x = _MPO.x
- }
-
- if (recurse){
- var _subNodes = this.subNodes().filter(function(x){return x.type == "GROUP"});
- for (var i in _subNodes){
- _subNodes[i].orderNodeView(recurse);
- }
- }
-}
-
-
-/**
- * Adds a node to the group.
- * @param {string} type The type-name of the node to add.
- * @param {string} [name=type] The name of the newly created node.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position for the node to be placed in the network.
- *
- * @return {$.oNode} The created node, or false if the node could not be created.
- * @example
- * // to add a node, simply call addNode on the group you want the node to be added to.
- * var sceneRoot = $.scn.root; // grab the scene root group ("Top")
- *
- * var peg = sceneRoot.addNode("PEG", "MyNewlyCreatedPeg"); // adding a peg
- *
- * // Now we'll also create a drawing node to connect under the peg
- * var sceneComposite = $.scn.getNodeByPath("Top/Composite"); // can also use $.scn.$node("Top/Composite") for shorter syntax
- *
- * var drawingNode = sceneRoot.addDrawingNode("myNewDrawingNode");
- * drawingNode.linkOutNode(sceneComposite);
- * drawingNode.can_animate = false // setting some attributes on the newly created Node
- *
- * peg.linkOutNode(drawingNode);
- *
- * //through all this we didn't specify nodePosition parameters so we'll sort everything at once
- *
- * sceneRoot.orderNodeView();
- *
- * // we can also do:
- *
- * peg.centerAbove(drawingNode);
- *
- */
-$.oGroupNode.prototype.addNode = function( type, name, nodePosition ){
- // Defaults for optional parameters
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
- if (typeof name === 'undefined') var name = type[0]+type.slice(1).toLowerCase();
- if (typeof name !== 'string') name = name+"";
-
- var _group = this.path;
-
- // create node and return result (this sanitizes/increments the name, so we only create the oNode with the returned value)
- var _path = node.add(_group, name, type, nodePosition.x, nodePosition.y, nodePosition.z);
-  var _node = this.scene.getNodeByPath(_path);
-
- return _node;
-}
-
-
-/**
- * Adds a drawing layer to the group, with a drawing column and element linked. Possible to specify the column and element to use.
- * @param {string} name The name of the newly created node.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position for the node to be placed in the network.
- * @param {$.object} [element] The element to attach to the column.
- * @param {object} [drawingColumn] The column to attach to the drawing module.
-
- * @return {$.oNode} The created node, or false if the node could not be created.
- */
-
-$.oGroupNode.prototype.addDrawingNode = function( name, nodePosition, oElementObject, drawingColumn){
- // add drawing column and element if not passed as parameters
- this.$.beginUndo("oH_addDrawingNode_"+name);
-
- // Defaults for optional parameters
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
-  if (typeof name === 'undefined') var name = "Drawing"; // no "type" variable in this function (unlike addNode), so fall back to a generic default name
-
- // creating the node first to get the "safe name" returned by harmony
- var _node = this.addNode("READ", name, nodePosition);
-
- if (typeof oElementObject === 'undefined') var oElementObject = this.scene.addElement(_node.name);
- if (typeof drawingColumn === 'undefined'){
- // first look for a column in the element
- if (!oElementObject.column) {
- var drawingColumn = this.scene.addColumn("DRAWING", _node.name, oElementObject);
- }else{
- var drawingColumn = oElementObject.column;
- }
- }
-
- // setup the node
- // setup animate mode/separate based on preferences?
- _node.attributes.drawing.element.column = drawingColumn;
-
- this.$.endUndo();
-
- return _node;
-}
-
-
-/**
- * Adds a new group to the group, and optionally move the specified nodes into it.
- * @param {string} name The name of the newly created group.
- * @param {bool} [addComposite=false] Whether to add a composite.
- * @param {bool} [addPeg=false] Whether to add a peg.
- * @param {$.oNode[]} [includeNodes] The nodes to add to the group.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position for the node to be placed in the network.
-
- * @return {$.oGroupNode} The created group node, or false if it could not be created.
- */
-$.oGroupNode.prototype.addGroup = function( name, addComposite, addPeg, includeNodes, nodePosition ){
- // Defaults for optional parameters
- if (typeof addPeg === 'undefined') var addPeg = false;
- if (typeof addComposite === 'undefined') var addComposite = false;
- if (typeof includeNodes === 'undefined') var includeNodes = [];
-
- this.$.beginUndo("oH_addGroup_"+name);
-
- var nodeBox = new this.$.oBox();
- includeNodes = includeNodes.filter(function(x){return !!x}) // filter out all invalid types
- if (includeNodes.length > 0) nodeBox.includeNodes(includeNodes);
-
- if (typeof nodePosition === 'undefined') var nodePosition = includeNodes.length?nodeBox.center:new this.$.oPoint(0,0,0);
-
- var _group = this.addNode( "GROUP", name, nodePosition );
-
- var _MPI = _group.multiportIn;
- var _MPO = _group.multiportOut;
-
- if (addComposite){
- var _composite = _group.addNode("COMPOSITE", name+"_Composite");
- _composite.composite_mode = "Pass Through"; // get preference?
- _composite.linkOutNode(_MPO);
- _composite.centerAbove(_MPO);
- }
-
- if (addPeg){
- var _peg = _group.addNode("PEG", name+"-P");
- _peg.linkInNode(_MPI);
- _peg.centerBelow(_MPI);
- }
-
- // moves nodes into the created group and recreates their hierarchy and links
- if (includeNodes.length > 0){
- includeNodes = includeNodes.sort(function(a, b){return a.timelineIndex()>=b.timelineIndex()?1:-1})
-
- var _links = this.scene.getNodesLinks(includeNodes);
-
- for (var i in includeNodes){
- includeNodes[i].moveToGroup(_group);
- }
-
- for (var i in _links){
- _links[i].connect();
- }
-
- // link all unconnected nodes to the peg/MPI and comp/MPO
- var _topNode = _peg?_peg:_MPI;
- var _bottomNode = _composite?_composite:_MPO;
-
- for (var i in includeNodes){
- for (var j=0; j < includeNodes[i].inPorts; j++){
- if (includeNodes[i].getInLinksNumber(j) == 0) includeNodes[i].linkInNode(_topNode);
- }
-
- for (var j=0; j < includeNodes[i].outPorts; j++){
- if (includeNodes[i].getOutLinksNumber(j) == 0) includeNodes[i].linkOutNode(_bottomNode,0,0);
- }
- }
-
- //shifting MPI/MPO/peg/comp out of the way of included nodes
- if (_peg){
- _peg.centerAbove(includeNodes);
- includeNodes.push(_peg);
- }
-
- if (_composite){
- _composite.centerBelow(includeNodes);
- includeNodes.push(_composite);
- }
-
- _MPI.centerAbove(includeNodes);
- _MPO.centerBelow(includeNodes);
- }
-
- this.$.endUndo();
- return _group;
-}
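A minimal sketch of grouping the current Node View selection with addGroup, assuming something is selected when the script runs:

    var doc = $.scn;
    var selected = doc.getSelectedNodes();                    // nodes currently selected in the Node View
    if (selected.length){
      // create "MyGroup" with a composite and a peg, moving the selection inside it
      var group = doc.root.addGroup("MyGroup", true, true, selected);
      group.orderNodeView();                                  // tidy the inside of the new group
    }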
-
-
-/**
- * Imports the specified template into the scene.
- * @param {string} tplPath The path of the TPL file to import.
- * @param {$.oNode[]} [destinationNodes=false] The nodes affected by the template.
- * @param {bool} [extendScene=true] Whether to extend the exposures of the content imported.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position to offset imported new nodes.
- * @param {object} [pasteOptions] An object containing paste options as per Harmony's standard paste options.
- *
- * @return {$.oNode[]} The resulting pasted nodes.
- */
-$.oGroupNode.prototype.importTemplate = function( tplPath, destinationNodes, extendScene, nodePosition, pasteOptions ){
-  if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
- if (typeof destinationNodes === 'undefined' || destinationNodes.length == 0) var destinationNodes = false;
- if (typeof extendScene === 'undefined') var extendScene = true;
-
- if (typeof pasteOptions === 'undefined') var pasteOptions = copyPaste.getCurrentPasteOptions();
- pasteOptions.extendScene = extendScene;
-
- this.$.beginUndo("oH_importTemplate");
-
- var _group = this.path;
-
- if(tplPath instanceof this.$.oFolder) tplPath = tplPath.path;
-
- this.$.debug("importing template : "+tplPath, this.$.DEBUG_LEVEL.LOG);
-
- var _copyOptions = copyPaste.getCurrentCreateOptions();
- var _tpl = copyPaste.copyFromTemplate(tplPath, 0, 999, _copyOptions); // any way to get the length of a template before importing it?
-
- if (destinationNodes){
- // TODO: deal with import options to specify frames
- copyPaste.paste(_tpl, destinationNodes.map(function(x){return x.path}), 0, 999, pasteOptions);
- var _nodes = destinationNodes;
- }else{
- var oldBackdrops = this.backdrops;
- copyPaste.pasteNewNodes(_tpl, _group, pasteOptions);
- var _scene = this.scene;
- var _nodes = selection.selectedNodes().map(function(x){return _scene.$node(x)});
- for (var i in _nodes){
- // only move the root nodes
- if (_nodes[i].parent.path != this.path) continue
-
- _nodes[i].x += nodePosition.x;
- _nodes[i].y += nodePosition.y;
- }
-
- // move backdrops present in the template
- var backdrops = this.backdrops.slice(oldBackdrops.length);
- for (var i in backdrops){
- backdrops[i].x += nodePosition.x;
- backdrops[i].y += nodePosition.y;
- }
-
- // move waypoints in the top level of the template
- for (var i in _nodes) {
- var nodePorts = _nodes[i].outPorts;
- for (var p = 0; p < nodePorts; p++) {
- var theseWP = waypoint.childWaypoints(_nodes[i], p);
- if (theseWP.length > 0) {
- for (var w in theseWP) {
- var x = waypoint.coordX(theseWP[w]);
- var y = waypoint.coordY(theseWP[w]);
- x += nodePosition.x;
- y += nodePosition.y;
- waypoint.setCoord(theseWP[w],x,y);
- }
- }
- }
- }
-
- }
-
- this.$.endUndo();
- return _nodes;
-}
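For illustration, a hedged sketch of importing a template from a hypothetical path into the scene root and tidying the node view afterwards:

    var doc = $.scn;
    var tplPath = "/path/to/myTemplate.tpl";                  // hypothetical template path
    var newNodes = doc.root.importTemplate(tplPath);          // paste the template as new nodes in the root group
    doc.root.orderNodeView();
    $.log(newNodes.length + " nodes imported from the template");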
-
-
-/**
- * Adds a backdrop to a group in a specific position.
- * @param {string} [title="Backdrop"] The title of the backdrop.
- * @param {string} [body=""] The body text of the backdrop.
- * @param {$.oColorValue} [color="#323232ff"] The oColorValue of the node.
- * @param {float} [x=0] The X position of the backdrop, an offset value if nodes are specified.
- * @param {float} [y=0] The Y position of the backdrop, an offset value if nodes are specified.
- * @param {float} [width=30] The Width of the backdrop, a padding value if nodes are specified.
- * @param {float} [height=30] The Height of the backdrop, a padding value if nodes are specified.
- *
- * @return {$.oBackdrop} The created backdrop.
- */
-$.oGroupNode.prototype.addBackdrop = function(title, body, color, x, y, width, height ){
- if (typeof color === 'undefined') var color = new this.$.oColorValue("#323232ff");
- if (typeof body === 'undefined') var body = "";
-
- if (typeof x === 'undefined') var x = 0;
- if (typeof y === 'undefined') var y = 0;
- if (typeof width === 'undefined') var width = 30;
- if (typeof height === 'undefined') var height = 30;
-
- var position = {"x":x, "y":y, "w":width, "h":height};
-
- var groupPath = this.path;
-
- if(!(color instanceof this.$.oColorValue)) color = new this.$.oColorValue(color);
-
-
- // incrementing title so that two backdrops can't have the same title
- if (typeof title === 'undefined') var title = "Backdrop";
-
- var _groupBackdrops = Backdrop.backdrops(groupPath);
- var names = _groupBackdrops.map(function(x){return x.title.text})
- var count = 0;
- var newTitle = title;
-
- while (names.indexOf(newTitle) != -1){
- count++;
- newTitle = title+"_"+count;
- }
- title = newTitle;
-
-
- var _backdrop = {
- "position" : position,
- "title" : {"text":title, "color":4278190080, "size":12, "font":"Arial"},
- "description" : {"text":body, "color":4278190080, "size":12, "font":"Arial"},
- "color" : color.toInt()
- }
-
- Backdrop.addBackdrop(groupPath, _backdrop)
- return new this.$.oBackdrop(groupPath, _backdrop)
-};
-
-
-/**
- * Adds a backdrop to a group around specified nodes
- * @param {$.oNode[]} nodes The nodes that the backdrop encompasses.
- * @param {string} [title="Backdrop"] The title of the backdrop.
- * @param {string} [body=""] The body text of the backdrop.
- * @param {$.oColorValue} [color="#323232ff"] The color of the backdrop.
- * @param {float} [x=0] The X position of the backdrop, an offset value if nodes are specified.
- * @param {float} [y=0] The Y position of the backdrop, an offset value if nodes are specified.
- * @param {float} [width=20] The Width of the backdrop, a padding value if nodes are specified.
- * @param {float} [height=20] The Height of the backdrop, a padding value if nodes are specified.
- *
- * @return {$.oBackdrop} The created backdrop.
- * @example
- * function createColoredBackdrop(){
- * // This script will prompt for a color and create a backdrop around the selection
- * $.beginUndo()
- *
- * var doc = $.scn; // grab the scene
- * var nodes = doc.getSelectedNodes(); // grab the selection
- *
- * if(!nodes) return // exit the function if no nodes are selected
- *
- * var color = pickColor(); // prompt for color
- *
- * var group = nodes[0].group // get the group to add the backdrop to
- * var backdrop = group.addBackdropToNodes(nodes, "BackDrop", "", color)
- *
- * $.endUndo();
- *
- * // function to get the color chosen by the user
- * function pickColor(){
- * var d = new QColorDialog;
- * d.exec();
- * var color = d.selectedColor();
- * return new $.oColorValue({r:color.red(), g:color.green(), b:color.blue(), a:color.alpha()})
- * }
- * }
- */
-$.oGroupNode.prototype.addBackdropToNodes = function( nodes, title, body, color, x, y, width, height ){
- if (typeof color === 'undefined') var color = new this.$.oColorValue("#323232ff");
- if (typeof body === 'undefined') var body = "";
- if (typeof x === 'undefined') var x = 0;
- if (typeof y === 'undefined') var y = 0;
- if (typeof width === 'undefined') var width = 20;
- if (typeof height === 'undefined') var height = 20;
-
-
- // get default size from node bounds
- if (typeof nodes === 'undefined') var nodes = [];
-
- if (nodes.length > 0) {
- var _nodeBox = new this.$.oBox();
- _nodeBox.includeNodes(nodes);
-
- x = _nodeBox.left - x - width;
- y = _nodeBox.top - y - height;
- width = _nodeBox.width + width*2;
- height = _nodeBox.height + height*2;
- }
-
- var _backdrop = this.addBackdrop(title, body, color, x, y, width, height)
-
- return _backdrop;
-};
-
-
-/**
- * Imports a PSD into the group.
- * This function is not available when running Harmony in batch mode.
- * @param {string} path The PSD file to import.
- * @param {bool} [separateLayers=true] Separate the layers of the PSD.
- * @param {bool} [addPeg=true] Whether to add a peg.
- * @param {bool} [addComposite=true] Whether to add a composite.
- * @param {string} [alignment="ASIS"] Alignment type.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position for the node to be placed in the node view.
- *
- * @return {$.oNode[]} The nodes being created as part of the PSD import.
- * @example
- * // This example browses for a PSD file then import it in the root of the scene, then connects it to the main composite.
- *
- * function importCustomPSD(){
- * $.beginUndo("importCustomPSD");
- * var psd = $.dialog.browseForFile("get PSD", "*.psd"); // prompt for a PSD file
- *
- * if (!psd) return; // dialog was cancelled, exit the function
- *
- * var doc = $.scn; // get the scene object
- * var sceneRoot = doc.root // grab the scene root group
- * var psdNodes = sceneRoot.importPSD(psd); // import the psd with default settings
- * var psdComp = psdNodes.pop() // get the composite node at the end of the psdNodes array
- * var sceneComp = doc.$node("Top/Composite") // get the scene main composite
- * psdComp.linkOutNode(sceneComp); // ... and link the two.
- * sceneRoot.orderNodeView(); // orders the node view inside the group
- * $.endUndo();
- * }
- */
-$.oGroupNode.prototype.importPSD = function( path, separateLayers, addPeg, addComposite, alignment, nodePosition){
- if (typeof alignment === 'undefined') var alignment = "ASIS" // create an enum for alignments?
- if (typeof addComposite === 'undefined') var addComposite = true;
- if (typeof addPeg === 'undefined') var addPeg = true;
- if (typeof separateLayers === 'undefined') var separateLayers = true;
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
-
-  var _psdFile = (path instanceof this.$.oFile)?path:new this.$.oFile( path );
-
-  if (this.$.batchMode){
-    this.$.debug("Error: can't import PSD file "+_psdFile.path+" in batch mode.", this.$.DEBUG_LEVEL.ERROR);
-    return null
-  }
-
-  if (!_psdFile.exists){
-    this.$.debug("Error: can't import PSD file "+_psdFile.path+" because it doesn't exist", this.$.DEBUG_LEVEL.ERROR);
-    return null;
-  }
-
- this.$.beginUndo("oH_importPSD_"+_psdFile.name);
-
- var _elementName = _psdFile.name;
-
- var _xSpacing = 45;
- var _ySpacing = 30;
-
- var _element = this.scene.addElement(_elementName, "PSD");
-
-  // save the scene: otherwise the PSD itself is copied into the element correctly,
-  // but the TGAs for each layer are not generated
-  // TODO: how to go around this to avoid saving?
- scene.saveAll();
- var _drawing = _element.addDrawing(1);
-
- if (addPeg) var _peg = this.addNode("PEG", _elementName+"-P", nodePosition);
- if (addComposite) var _comp = this.addNode("COMPOSITE", _elementName+"-Composite", nodePosition);
-
- // Import the PSD in the element
- CELIO.pasteImageFile({ src : _psdFile.path, dst : { elementId : _element.id, exposure : _drawing.name}});
- var _layers = CELIO.getLayerInformation(_psdFile.path);
- var _info = CELIO.getInformation(_psdFile.path);
-
- // create the nodes for each layer
- var _nodes = [];
- if (separateLayers){
-
- var _scale = _info.height/scene.defaultResolutionY();
- var _x = nodePosition.x - _layers.length/2*_xSpacing;
- var _y = nodePosition.y - _layers.length/2*_ySpacing;
-
- for (var i in _layers){
- // generate nodes and set them to show the element for each layer
- var _layer = _layers[i];
- var _layerName = _layer.layerName.split(" ").join("_");
- var _nodePosition = new this.$.oPoint(_x+=_xSpacing, _y +=_ySpacing, 0);
-
- // get/build the group
- var _group = this;
- var _groupPathComponents = _layer.layerPathComponents;
- var _destinationPath = this.path;
- var _groupPeg = _peg;
- var _groupComp = _comp;
-
- // recursively creating groups if they are missing
-      // use a separate loop variable so the outer loop's "i" (the layer index) isn't clobbered
-      for (var j in _groupPathComponents){
-        var _destinationPath = _destinationPath + "/" + _groupPathComponents[j];
-        var _nextGroup = this.$.scene.getNodeByPath(_destinationPath);
-
-        if (!_nextGroup){
-          _nextGroup = _group.addGroup(_groupPathComponents[j], true, true, [], _nodePosition);
- if (_groupPeg) _nextGroup.linkInNode(_groupPeg);
- if (_groupComp) _nextGroup.linkOutNode(_groupComp, 0, 0);
- }
- // store the peg/comp for next iteration or layer node
- _group = _nextGroup;
- _groupPeg = _group.multiportIn.linkedOutNodes[0];
- _groupComp = _group.multiportOut.linkedInNodes[0];
- }
-
- var _column = this.scene.addColumn("DRAWING", _layerName, _element);
- var _node = _group.addDrawingNode(_layerName, _nodePosition, _element, _column);
-
- _node.enabled = _layers[i].visible;
- _node.can_animate = false; // use general pref?
- _node.apply_matte_to_color = "Straight";
- _node.alignment_rule = alignment;
- _node.scale.x = _scale;
- _node.scale.y = _scale;
-
- _column.setValue(_layer.layer != ""?"1:"+_layer.layer:1, 1);
- _column.extendExposures();
-
- if (_groupPeg) _node.linkInNode(_groupPeg);
- if (_groupComp) _node.linkOutNode(_groupComp, 0, 0);
-
- _nodes.push(_node);
- }
- }else{
- this.$.endUndo();
- throw new Error("importing PSD as a flattened layer not yet implemented");
- }
-
- if (addPeg){
- _peg.centerAbove(_nodes, 0, -_ySpacing )
- _nodes.unshift(_peg)
- }
-
- if (addComposite){
- _comp.centerBelow(_nodes, 0, _ySpacing )
- _nodes.push(_comp)
- }
- // TODO how to display only one node with the whole file
- this.$.endUndo()
-
- return _nodes
-}
-
-
-/**
- * Updates a PSD previously imported into the group
- * @param {string} path The updated psd file to import.
- * @param {bool} [separateLayers=true] Separate the layers of the PSD.
- *
- * @return {$.oNode[]} The nodes that have been updated/created
- */
-$.oGroupNode.prototype.updatePSD = function( path, separateLayers ){
- if (typeof separateLayers === 'undefined') var separateLayers = true;
-
- var _psdFile = (path instanceof this.$.oFile)?path:new this.$.oFile(path);
- if (!_psdFile.exists){
- this.$.debug("Error: can't import PSD file "+_psdFile.path+" for update because it doesn't exist", this.$.DEBUG_LEVEL.ERROR);
- return null;
- }
-
- this.$.beginUndo("oH_updatePSD_"+_psdFile.name)
-
- // get info from the PSD
- var _info = CELIO.getInformation(_psdFile.path);
- var _layers = CELIO.getLayerInformation(_psdFile.path);
- var _scale = _info.height/scene.defaultResolutionY();
-
- // use layer information to find nodes from precedent export
- if (separateLayers){
- var _nodes = this.subNodes(true).filter(function(x){return x.type == "READ"});
- var _nodeNames = _nodes.map(function(x){return x.name});
-
- var _psdNodes = [];
- var _missingLayers = [];
- var _PSDelement = "";
- var _positions = new Array(_layers.length);
- var _scale = _info.height/scene.defaultResolutionY();
-
- // for each layer find the node by looking at the column name
- for (var i in _layers){
- var _layer = _layers[i];
- var _layerName = _layers[i].layerName.split(" ").join("_");
- var _found = false;
-
- // find the node
- for (var j in _nodes){
- if (_nodes[j].element.format != "PSD") continue;
-
- var _drawingColumn = _nodes[j].attributes.drawing.element.column;
-
- // update the node if found
- if (_drawingColumn.name == _layer.layerName){
- _psdNodes.push(_nodes[j]);
- _found = true;
-
- // update scale in case PSDfile size changed
- _nodes[j].scale.x = _scale;
- _nodes[j].scale.y = _scale;
-
- _positions[_layer.position] = _nodes[j];
-
- // store the element
- _PSDelement = _nodes[j].element
-
- break;
- }
- // if not found, add to the list of layers to import
- _found = false;
- }
-
- if (!_found) _missingLayers.push(_layer);
- }
-
-
- if (_psdNodes.length == 0){
- // PSD was never imported, use import instead?
- this.$.debug("can't find a PSD element to update", this.$.DEBUG_LEVEL.ERROR);
- this.$.endUndo();
- return null;
- }
-
- // pasting updated PSD into element
- CELIO.pasteImageFile({ src : _psdFile.path, dst : { elementId : _PSDelement.id, exposure : "1"}})
-
- for (var i in _missingLayers){
- // find previous import Settings re: group/alignment etc
- var _layer = _missingLayers[i];
- var _layerName = _layer.layerName.split(" ").join("_");
-
- var _layerIndex = _layer.position;
- var _nodePosition = new this.$.oPoint(0,0,0);
- var _group = _psdNodes[0].group;
- var _alignment = _psdNodes[0].alignment_rule;
- var _scale = _psdNodes[0].scale.x;
- var _peg = _psdNodes[0].inNodes[0];
- var _comp = _psdNodes[0].outNodes[0];
- var _scale = _info.height/scene.defaultResolutionY()
- var _port;
-
- //TODO: set into right group according to PSD organisation
- // looking for the existing node below and get the comp port from it
- for (var j = _layerIndex-1; j>=0; j--){
- if (_positions[j] != undefined) break;
- }
- var _nodeBelow = _positions[j];
-
- var _compNodes = _comp.inNodes;
-
- for (var j=0; j<_compNodes.length; j++){
- if (_nodeBelow.path == _compNodes[j].path){
- _port = j+1;
- _nodePosition = _compNodes[j].nodePosition;
- _nodePosition.x -= 35;
- _nodePosition.y -= 25;
- }
- }
-
- // generate nodes and set them to show the element for each layer
- var _node = this.addDrawingNode(_layerName, _nodePosition, _PSDelement);
-
- _node.enabled = _layer.visible;
- _node.can_animate = false; // use general pref?
- _node.apply_matte_to_color = "Straight";
- _node.alignment_rule = _alignment;
- _node.scale.x = _scale;
- _node.scale.y = _scale;
-
- _node.attributes.drawing.element.setValue(_layer.layer != ""?"1:"+_layer.layer:1, 1);
- _node.attributes.drawing.element.column.extendExposures();
-
- // find composite/peg to connect to based on other layers
-
- //if (addPeg) _node.linkInNode(_peg)
- if (_port) _node.linkOutNode(_comp, 0, _port)
-
- _nodes.push(_node);
- }
- this.$.endUndo();
-    return _nodes;
- } else{
- this.$.endUndo();
- throw new Error("updating a PSD imported as a flattened layer not yet implemented");
- }
-}
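A minimal sketch of refreshing a previously imported PSD with updatePSD, assuming the path below is hypothetical and the PSD was originally imported with separated layers:

    var doc = $.scn;
    var updatedNodes = doc.root.updatePSD("/path/to/updated_character.psd");  // hypothetical path
    if (updatedNodes) $.log("updated or created " + updatedNodes.length + " drawing nodes");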
-
-
-/**
- * Import a generic image format (PNG, JPG, TGA etc) as a read node.
- * @param {string} path The image file to import.
- * @param {string} [alignment="ASIS"] Alignment type.
- * @param {$.oPoint} [nodePosition={0,0,0}] The position for the node to be placed in the node view.
- * @param {bool} [convertToTvg=false] Whether to convert the image to a TVG drawing on import.
- *
- * @return {$.oNode} The node for the imported image.
- */
-$.oGroupNode.prototype.importImage = function( path, alignment, nodePosition, convertToTvg){
- if (typeof alignment === 'undefined') var alignment = "ASIS"; // create an enum for alignments?
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
-
- var _imageFile = (path instanceof this.$.oFile)?path:new this.$.oFile( path );
- var _elementName = _imageFile.name;
-
- var _elementType = convertToTvg?"TVG":_imageFile.extension.toUpperCase();
- var _element = this.scene.addElement(_elementName, _elementType);
- var _column = this.scene.addColumn("DRAWING", _elementName, _element);
- _element.column = _column;
-
- if (_imageFile.exists) {
- var _drawing = _element.addDrawing(1, 1, _imageFile.path, convertToTvg);
- }else{
- this.$.debug("Image file to import "+_imageFile.path+" could not be found.", this.$.DEBUG_LEVEL.ERROR);
- }
-
- var _imageNode = this.addDrawingNode(_elementName, nodePosition, _element);
-
- _imageNode.can_animate = false; // use general pref?
- _imageNode.apply_matte_to_color = "Straight";
- _imageNode.alignment_rule = alignment;
-
- var _scale = CELIO.getInformation(_imageFile.path).height/this.scene.defaultResolutionY;
- _imageNode.scale.x = _scale;
- _imageNode.scale.y = _scale;
-
- _imageNode.attributes.drawing.element.setValue(_drawing.name, 1);
- _imageNode.attributes.drawing.element.column.extendExposures();
-
- // TODO how to display only one node with the whole file
- return _imageNode;
-}
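For illustration, a hedged sketch importing a single image from a hypothetical path into the root group and plugging it into the main composite:

    var doc = $.scn;
    var imageNode = doc.root.importImage("/path/to/background.png");  // hypothetical image path
    imageNode.linkOutNode(doc.$node("Top/Composite"));                // connect it under the scene composite
    doc.root.orderNodeView();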
-
-
-/**
- * imports an image as a tvg drawing.
- * @param {$.oFile} path the image file to import
- * @param {string} [alignment="ASIS"] the alignment mode for the imported image
- * @param {$.oPoint} [nodePosition={0,0,0}] the position for the created node.
- */
-$.oGroupNode.prototype.importImageAsTVG = function(path, alignment, nodePosition){
- if (!(path instanceof this.$.oFile)) path = new this.$.oFile(path);
-
-  var _imageNode = this.importImage(path, alignment, nodePosition, true);
- _imageNode.name = path.name;
-
- return _imageNode;
-}
-
-
-/**
- * imports an image sequence as a node into the current group.
- * @param {$.oFile[]} imagePaths a list of paths to the images to import (can pass a list of strings or $.oFile)
- * @param {number} [exposureLength=1] the number of frames each drawing should be exposed at. If set to 0/false, each drawing will use the numbering suffix of the file to set its frame.
- * @param {boolean} [convertToTvg=false] whether to convert the files to tvg during import
- * @param {string} [alignment="ASIS"] the alignment to apply to the node
- * @param {$.oPoint} [nodePosition] the position of the node in the nodeview
- *
- * @returns {$.oDrawingNode} the created node
- */
-$.oGroupNode.prototype.importImageSequence = function(imagePaths, exposureLength, convertToTvg, alignment, nodePosition, extendScene) {
- if (typeof exposureLength === 'undefined') var exposureLength = 1;
- if (typeof alignment === 'undefined') var alignment = "ASIS"; // create an enum for alignments?
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
-
- if (typeof extendScene === 'undefined') var extendScene = false;
-
-  // match anything, but capture any trailing digits and the separator punctuation preceding them
- var numberingRe = /(.*?)([\W_]+)?(\d*)$/i;
-
-  // sanitize imagePaths (pass "this" to map so the callback can reach this.$)
-  imagePaths = imagePaths.map(function(x){
-    if (x instanceof this.$.oFile){
-      return x;
-    } else {
-      return new this.$.oFile(x);
-    }
-  }, this)
-
- var images = [];
-
- if (!exposureLength) {
- // figure out scene length based on exposure and extend the scene if needed
- var sceneLength = 0;
- var image = {frame:0, path:""};
-
- for (var i in imagePaths){
- var imagePath = imagePaths[i];
- if (!(imagePath instanceof this.$.oFile)) imagePath = new this.$.oFile(imagePath);
- var nameGroups = imagePath.name.match(numberingRe);
-
- if (nameGroups[3]){
- // use trailing number as frame number
- var frameNumber = parseInt(nameGroups[3], 10);
- if (frameNumber > sceneLength) sceneLength = frameNumber;
-
- images.push({frame: frameNumber, path:imagePath});
- }
- }
- } else {
- // simply create a list of numbers based on exposure
- images = imagePaths.map(function(x, index){
- var frameNumber = index * exposureLength + 1;
- return ({frame:frameNumber, path:x});
- })
- var sceneLength = images[images.length-1].frame + exposureLength - 1;
- }
-
- if (extendScene){
- if (this.scene.length < sceneLength) this.scene.length = sceneLength;
- }
-
- // create a node to hold the image sequence
- var firstImage = imagePaths[0];
- var name = firstImage.name.match(numberingRe)[1]; // match anything before trailing digits
- var drawingNode = this.importImage(firstImage, alignment, nodePosition, convertToTvg);
- drawingNode.name = name;
-
- for (var i in images){
- var image = images[i];
- drawingNode.element.addDrawing(image.frame, image.frame, image.path, convertToTvg);
- }
-
- drawingNode.timingColumn.extendExposures();
-
- return drawingNode;
-}
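A minimal sketch of importing a numbered image sequence from hypothetical file paths; passing 0 as exposureLength lets each file's trailing number decide its frame, and extendScene grows the scene to fit:

    var frames = [
      "/path/to/shot_0001.png",   // hypothetical frame files
      "/path/to/shot_0002.png",
      "/path/to/shot_0003.png"
    ];
    var seqNode = $.scn.root.importImageSequence(frames, 0, false, "ASIS", new $.oPoint(0,0,0), true);
    seqNode.linkOutNode($.scn.$node("Top/Composite"));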
-
-/**
- * Imports a QuickTime movie into the group.
- * @param {string} path The QuickTime file to import.
- * @param {bool} [importSound=true] Whether to import the sound
- * @param {bool} [extendScene=true] Whether to extend the scene to the duration of the QT.
- * @param {string} [alignment="ASIS"] Alignment type.
- * @param {$.oPoint} [nodePosition] The position for the node to be placed in the network.
- *
- * @return {$.oNode} The imported Quicktime Node.
- */
-$.oGroupNode.prototype.importQT = function( path, importSound, extendScene, alignment, nodePosition){
- if (typeof alignment === 'undefined') var alignment = "ASIS";
- if (typeof extendScene === 'undefined') var extendScene = true;
- if (typeof importSound === 'undefined') var importSound = true;
- if (typeof nodePosition === 'undefined') var nodePosition = new this.$.oPoint(0,0,0);
-
- var _QTFile = (path instanceof this.$.oFile)?path:new this.$.oFile(path);
- if (!_QTFile.exists){
- throw new Error ("Import Quicktime failed: file "+_QTFile.path+" doesn't exist");
- }
-
- var _movieName = _QTFile.name;
- this.$.beginUndo("oH_importQT_"+_movieName);
-
- var _element = this.scene.addElement(_movieName, "PNG");
- var _elementName = _element.name;
-
- var _movieNode = this.addDrawingNode(_movieName, nodePosition, _element);
- var _column = _movieNode.attributes.drawing.element.column;
- _element.column = _column;
-
- // setup the node
- _movieNode.can_animate = false;
- _movieNode.alignment_rule = alignment;
-
- // create the temp folder
- var _tempFolder = new this.$.oFolder(this.$.scn.tempFolder.path + "/movImport/" + _element.id);
- _tempFolder.create();
-
- var _tempFolderPath = _tempFolder.path;
- var _audioPath = _tempFolder.path + "/" + _movieName + ".wav";
-
- // progressDialog will display an infinite loading bar as we don't have precise feedback
- var progressDialog = new this.$.oProgressDialog("Importing video...", 0, "Import Movie", true);
-
- // setup import
- MovieImport.setMovieFilename(_QTFile.path);
- MovieImport.setImageFolder(_tempFolder);
- MovieImport.setImagePrefix(_movieName);
- if (importSound) MovieImport.setAudioFile(_audioPath);
- this.$.log("converting movie file to pngs...");
- MovieImport.doImport();
- this.$.log("conversion finished");
-
- progressDialog.range = 100;
- progressDialog.value = 80;
-
- var _movielength = MovieImport.numberOfImages();
-
- if (extendScene && this.scene.length < _movielength) this.scene.length = _movielength;
-
- // create a drawing for each frame
- for (var i=1; i<=_movielength; i++) {
-    var _drawingPath = _tempFolderPath + "/" + _movieName + "-" + i + ".png";
- _element.addDrawing(i, i, _drawingPath);
- }
-
- progressDialog.value = 95;
-
- // creating an audio column for the sound
- if (importSound && MovieImport.isAudioFileCreated() ){
- var _soundName = _elementName + "_sound";
- var _soundColumn = this.scene.addColumn("SOUND", _soundName);
- column.importSound( _soundColumn.name, 1, _audioPath);
- }
-
- progressDialog.value = 100;
-
- this.$.endUndo();
- return _movieNode;
-}
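For illustration, a hedged sketch of importing a QuickTime animatic from a hypothetical path and connecting it to the scene composite:

    var doc = $.scn;
    var movieNode = doc.root.importQT("/path/to/animatic.mov");  // hypothetical movie path
    movieNode.linkOutNode(doc.$node("Top/Composite"));
    doc.root.orderNodeView();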
-
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_nodeAttributes.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_nodeAttributes.js
deleted file mode 100644
index 38a077ea5a..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_nodeAttributes.js
+++ /dev/null
@@ -1,3636 +0,0 @@
-var docstringStart = "/**"
-var docstringEnd = "*/"
-/*
-// values outputted by following script:
-function traceAll(){
- var globalMessage = [docstringStart+"\n * Attributes associated to Node types\n * @class NodeTypes \n "+docstringEnd];
- var nodes = selection.selectedNodes()
-
- for (var i in nodes){
- var message = [
- "Attributes present in the node : " + node.getName(nodes[i]),
- "@name NodeTypes#"+ node.type(nodes[i])
- ]
- message = message.concat(node.getAttrList( nodes[i], 1).map(function(x){return traceAttributes(x, nodes[i])}));
- message = ("\n "+docstringStart+"\n * ")+message.join("\n * ")+("\n "+docstringEnd+"\n");
- globalMessage.push(message);
- }
-
- MessageLog.trace(globalMessage.join("\n"));
-}
-
-function traceAttributes(attribute, theNode){
- var message = [formatAttribute(attribute, theNode)];
- if (attribute.hasSubAttributes()){
- var subattributes = attribute.getSubAttributes();
- for (var i in subattributes){
- message = message.concat(traceAttributes(subattributes[i], theNode));
- }
- }
- return message
-}
-
-function formatAttribute(attr, theNode){
- var keyword = attr.fullKeyword();
- var type = attr.typeName().toLowerCase();
- var name = attr.name();
- var defaultValue = node.getTextAttr(theNode, 1, keyword).split(" ").join("_").replace(".0000", "");
- if (defaultValue == "N") defaultValue = "false";
- if (defaultValue == "Y") defaultValue = "true";
- var message = "@property {"+type+"} "+keyword.toLowerCase()+((defaultValue)?"="+defaultValue:"")+" - "+name+"."
- return message;
-}
-*/
-
-/**
- * Attributes associated to Node types.
- * These are the types to specify when creating a node, and the corresponding usual node name when creating directly through Harmony's interface. The attributes displayed here can be set and manipulated by calling the displayed names.
- * @class NodeTypes
- * @hideconstructor
- * @namespace
- * @example
- * // This is how to use this page:
- *
- * var myNode = $.scn.root.addNode("READ"); // This is the node type as specified for each node under the default display name.
- * $.log(myNode.type) // This is how to find out the type
- *
- * myNode.drawing.element = "1" // Sets the drawing.element attribute to display drawing "1"
- *
- * myNode.drawing.element = {frameNumber: 5, value: "2"} // If the attribute can be animated, pass a {frameNumber, value} object to set a specific frame;
- *
- * myNode.attributes.drawing.element.setValue ("2", 5) // also possible to set the attribute directly.
- *
- * // refer to the node type on this page to find out what properties can be set with what syntax for each Node Type.
- */
-
-/**
- * Attributes present in the node of type: 'MasterController'
- * @name NodeTypes#MasterController
- * @property {string} [specs_editor="
[ { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link } ]
- * @private
- * @return {object} Object in form { "node":oNode, "port":int, "link": int }
- */
-$.oNodeLink.prototype.findInputPath = function( onode, port, path ) {
- var srcNodeInfo = node.srcNodeInfo( onode.path, port );
- if( !srcNodeInfo ){
- return path;
- }
-
- var src_node = this.$.scene.getNodeByPath( srcNodeInfo.node );
- if( !src_node ){
- return path;
- }
-
- if( src_node.type == "MULTIPORT_IN" ){
- //Continue to dereference until we find something other than a group/multiport in.
- var ret = { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link };
- path.push( ret );
-
- var src_node = src_node.group;
-
- var ret = { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link };
- path.push( ret );
- }else if( src_node.type == "GROUP" ){
- //Continue to dereference until we find something other than a group/multiport out.
- var ret = { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link };
- path.push( ret );
-
- var src_node = src_node.multiportOut;
-
- var ret = { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link };
- path.push( ret );
- }else{
- var ret = { "node": src_node, "port":srcNodeInfo.port, "link":srcNodeInfo.link };
- path.push( ret );
- return path;
- }
-
- return this.findInputPath( src_node, srcNodeInfo.port, path );
-}
-
-
-/**
- * Changes both the in-node and in-port at once.
- * @param {oNode} onode The node to link on the input.
- * @param {int} port The port to link on the input.
- * @example
- * //Connect two pegs together.
- * var peg1 = $.scene.getNodeByPath( "Top/Peg1" );
- * var peg2 = $.scene.getNodeByPath( "Top/Peg2" );
- *
- * var outLinks = peg1.outLinks;
- * outLinks[0].linkIn( peg2, 0 ); //Links the input of peg2, port 0 -- to this link, connecting its outNode [peg1] and outPort [0] and outLink [arbitrary].
- */
-$.oNodeLink.prototype.linkIn = function( onode, port ) {
- this._validated = false;
- var stopUpdates_val = this.stopUpdates;
- this.stopUpdates = true;
-
- this.inNode = onode;
- this.inPort = port;
-
- this.stopUpdates = stopUpdates_val;
-}
-
-
-/**
- * Changes both the out-node and out-port at once.
- * @param {oNode} onode The node to link on the output.
- * @param {int} port The port to link on the output.
- * @example
- * //Connect two pegs together.
- * var peg1 = $.scene.getNodeByPath( "Top/Peg1" );
- * var peg2 = $.scene.getNodeByPath( "Top/Peg2" );
- *
- * var inLinks = peg1.inLinks;
- * inLinks[0].linkOut( peg2, 0 ); //Links the output of peg2, port 0 -- to this link, connecting its inNode [peg1] and inPort [0].
- */
-$.oNodeLink.prototype.linkOut = function( onode, port ) {
- this._validated = false;
-
- var stopUpdates_val = this.stopUpdates;
- this.stopUpdates = true;
-
- this.outNode = onode;
- this.outPort = port;
-
- this.stopUpdates = stopUpdates_val;
-}
-
-
-/**
- * Insert a node in the middle of the link chain.
- * @param {oNode} nodeToInsert The node to link on the output.
- * @param {int} inPort The port to link on the output.
- * @param {int} outPort The port to link on the output.
- * @example
- * //Connect two pegs together.
- * var peg1 = $.scene.getNodeByPath( "Top/Peg1" );
- * var peg2 = $.scene.getNodeByPath( "Top/Peg2" );
- *
- * //Create a new $.oNodeLink -- We'll connect two pegs with this new nodeLink.
- * var link = new $.oNodeLink( peg1, //Out Node
- * 0, //Out Port
- * peg2, //In Node
- * 0 ); //In Port
- *
- * //The link we already created above can have a node inserted between it easily.
- * var peg4 = $.scene.getNodeByPath( "Top/Peg4" );
- * link.insertNode( peg4, 0, 0 ); //Peg to insert, in port, out port.
- */
-$.oNodeLink.prototype.insertNode = function( nodeToInsert, inPort, outPort ) {
-  this.stopUpdates = true;
-
-  // keep the previous destination so the new link can reconnect to it
-  var _oldInNode = this.inNode;
-  var _oldInPort = this.inPort;
-
-  this.inNode = nodeToInsert;
-  this.inPort = inPort;
-
-  this.stopUpdates = false;
-
-  var new_link = new this.$.oNodeLink( nodeToInsert, outPort, _oldInNode, _oldInPort, 0 );
-  new_link.apply( true );
-}
-
-/**
- * Apply the links as needed after unfreezing the oNodeLink
- * @param {bool} force Forcefully reconnect/disconnect the node given the current settings of this nodelink.
- * @example
- * //Connect two pegs together.
- * var peg1 = $.scene.getNodeByPath( "Top/Peg1" );
- * var peg2 = $.scene.getNodeByPath( "Top/Peg2" );
- *
- * //Create a new $.oNodeLink -- We'll connect two pegs with this new nodeLink.
- * var link = new $.oNodeLink( peg1, //Out Node
- * 0, //Out Port
- * peg2, //In Node
- * 0 ); //In Port
- *
- * //The node link doesn't exist yet, but lets apply it.
- * link.apply();
- */
-$.oNodeLink.prototype.apply = function( force ) {
- this._stopUpdates = false;
- this._validated = false; // ? Shouldn't we use this to bypass application if it's already been validated?
-
- var disconnect_in = false;
- var disconnect_out = false;
- var inports_removed = {};
- var outports_removed = {};
-
- if( force || !this._exists ){ //Apply this.
- this._newInNode = this._newInNode ? this._newInNode : this._inNode;
- this._newOutNode = this._newOutNode ? this._newOutNode : this._outNode;
- this._newOutPort = ( this._newOutPort === null ) ? this._outPort : this._newOutPort;
- this._newInPort = ( this._newInPort === null ) ? this._inPort : this._newInPort;
-
- var force = true;
-
- disconnect_in = true;
- disconnect_out = true;
- }else{
-
- //Force a reconnect -- track content as needed.
- //Check and validate in ports.
- var target_port = this._inPort;
- if( this._newInPort !== null ){
- if( this._newInPort != this._inPort ){
- target_port = this._newInPort;
- disconnect_in = true;
- }
- }
-
- var old_inPortCount = false; //Used to track if the inport count has changed upon its removal.
- if( this._newInNode !== null ){
- if( this._newInNode ){
- if( !this._inNode || ( this._inNode.path != this._newInNode.path ) ){
- disconnect_in = true;
- }
- }else if( this._inNode ){
- disconnect_in = true;
- }
- }
-
- //Check and validate out ports.
- if( this._newOutPort !== null ){
- if( ( this._newOutPort !== this._outPort ) ){
- disconnect_out = true;
- }
- }
-
- if( this._newOutNode !== null ){
- if( this._newOutNode ){
- if( !this._outNode || ( this._outNode.path != this._newOutNode.path ) ){
- disconnect_out = true;
- }
- }else if( this._outNode ){
- disconnect_out = true;
- }
- }
- }
-
- if( !disconnect_in && !disconnect_out ){
- //Nothing happened.
- // System.println( "NOTHING TO DO" );
- return;
- }
-
- if( this._newInNode ){
- // if( this._newInNode.inNodes.length > target_port ){
- if( this._newInNode.inPorts > target_port ){
- // if( this._newInNode.inNodes[ target_port ] ){
- if( node.isLinked(this._newInNode.path, target_port) ){
- //-- Theres already a connection here-- lets remove it.
- if( this.autoDisconnect ){
- node.unlink (this._newInNode.path, target_port)
- // this._newInNode.unlinkInPort( target_port );
- inports_removed[ this._newInNode.path ] = target_port;
- }else{
- throw "Unable to link "+this._outNode+" to "+this._newInNode+", port "+target_port+" is already occupied.";
- }
- }
- }
- }
-
- //We'll work with the new values -- pretend any new connection is a new one.
- this._newInNode = this._newInNode ? this._newInNode : this._inNode;
- this._newOutNode = this._newOutNode ? this._newOutNode : this._outNode;
- this._newOutPort = ( this._newOutPort === null ) ? this._outPort : this._newOutPort;
- this._newInPort = ( this._newInPort === null ) ? this._inPort : this._newInPort;
-
-
- if( !this._newInNode || !this._newOutNode ){
- //Nothing to attach.
- this._inNode = this._newInNode;
- this._inPort = this._newInPort;
- this._outNode = this._newOutNode;
- this._outPort = this._newOutPort;
-
- return;
- }
-
- if( !this._newInNode.exists || !this._newOutNode.exists ){
- this._inNode = this._newInNode;
- this._inPort = this._newInPort;
- this._outNode = this._newOutNode;
- this._outPort = this._newOutPort;
-
- return;
- }
-
-
- //Kill and rebuild the current connection - but first, calculate existing port indices so they can be reconnected contextually.
- // var newInPortCount = this._newInNode ? this._newInNode.inNodes.length : 0;
- var newInPortCount = this._newInNode ? this._newInNode.inPorts : 0;
- // var newOutPortCount = this._newOutNode ? this._newOutNode.outNodes.length : 0;
- var newOutPortCount = this._newOutNode ? this._newOutNode.outPorts : 0;
-
- //Unlink it anyway! Dont worry, we'll reattach that after.
- if( this._inNode ){
- // this._inNode.unlinkInPort( this._inPort );
- node.unlink (this._inNode.path, this._inPort)
- if( this._outNode ) outports_removed[ this._outNode.path ] = this._outPort;
- inports_removed[ this._inNode.path ] = this._inPort;
- }
-
- //Cant connect without a valid port.
- if( ( this._newOutPort === null ) || ( this._newOutPort === false ) ){
- this._inNode = this._newInNode;
- this._inPort = this._newInPort;
- this._outNode = this._newOutNode;
- this._outPort = this._newOutPort;
-
- return;
- }
- if( ( this._newInPort === null ) || ( this._newInPort === false ) ){
- this._inNode = this._newInNode;
- this._inPort = this._newInPort;
- this._outNode = this._newOutNode;
- this._outPort = this._newOutPort;
-
- return;
- }
-
- //Check to see if any of the port values have changed.
- var newInPortCount_result = this._newInNode ? this._newInNode.inNodes.length : 0;
- var newOutPortCount_result = this._newOutNode ? this._newOutNode.outNodes.length : 0;
-
- if( newOutPortCount_result != newOutPortCount ){
- //Outport might have changed. React appropriately.
- if( this._newOutNode.path in outports_removed ){
- if( this._newOutPort > outports_removed[ this._newOutNode.path ] ){
- this._newOutPort-=1;
- }
- }
- }
-
- if( newInPortCount_result != newInPortCount ){
- //Outport might have changed. React appropriately.
- if( this._newInNode.path in inports_removed ){
- if( this._newInPort > inports_removed[ this._newInNode.path ] ){
- this._newInPort-=1;
- }
- }
- }
-
- var new_inGroup = this._newInNode.group;
- var new_outGroup = this._newOutNode.group;
- if( new_inGroup.path == new_outGroup.path ){
- //Simple direct connection within the same group.
-    node.link( this._newOutNode.path, this._newOutPort, this._newInNode.path, this._newInPort );
- //this._newOutNode.linkOutNode( this._newInNode, this._newInPort, this._newOutPort ); MCNote: use the API so we can replace stuff into it later
-
- }else{
- //Look for an access route.
-
- var common_path = [];
- var split_in = new_inGroup.path.split( "/" );
- var split_out = new_outGroup.path.split( "/" );
-
- //Find the common top path.
- for( var n=0;n
- * In the event this value wasn't known by the link object but the link is actually connected, the correct value will be found.
- * @name $.oLink#inPort
- * @type {int}
- */
-Object.defineProperty($.oLink.prototype, 'inPort', {
- get : function(){
- if (this.linked) return this._inPort; // cached value was correct
-
- var _found = this.findPorts();
- if (_found) return this._inPort;
-
- // nodes are not connected
- return null;
- },
-
- set : function(newInPort){
- this._inPort = newInPort;
- this._linked = false;
- }
-});
-
-
-/**
- * The out-port used by the link. Changing this value doesn't reconnect the link, just changes the connection described by the link object.
- *
- * In the event this value wasn't known by the link object but the link is actually connected, the correct value will be found.
- * @name $.oLink#outPort
- * @type {int}
- */
-Object.defineProperty($.oLink.prototype, 'outPort', {
- get : function(){
- if (this.linked) return this._outPort; // cached value was correct
-
- var _found = this.findPorts();
- if (_found) return this._outPort;
-
- // nodes are not connected
- return null;
- },
-
- set : function(newOutPort){
- this._outPort = newOutPort;
- this._linked = false;
- }
-});
-
-
-/**
- * The index of the link coming out of the out-port.
- *
- * In the event this value wasn't known by the link object but the link is actually connected, the correct value will be found.
- * @name $.oLink#outLink
- * @readonly
- * @type {int}
- */
-Object.defineProperty($.oLink.prototype, 'outLink', {
- get : function(){
- if (this.linked) return this._outLink;
-
- var _found = this.findPorts();
- if (_found) return this._outLink;
-
- // nodes are not connected
- return null;
- }
-});
-
-
-/**
- * Get and set the linked status of a link
- * @name $.oLink#linked
- * @type {bool}
- */
-Object.defineProperty($.oLink.prototype, 'linked', {
- get : function(){
- if (this._linked) return this._linked;
-
- // first check if node object refers to two valid nodes
- if (this.outNode === undefined || this.inNode === undefined){
- this.$.debug("checking 'linked' for invalid link: "+this.outNode+">"+this.inNode, this.$.DEBUG_LEVEL.ERROR)
- return false;
- }
-
- // if ports/links unknown, get a valid link we can check
- if (this._outPort === undefined || this._inPort === undefined || this._outLink === undefined){
- if (!this.findPorts()){
- return false;
- }
- }
-
- // if ports/links are specified, we check the if the nodes connected to each port correspond with the link values
- var _linkedOutNode = this.outNode.getLinkedOutNode(this._outPort, this._outLink);
- var _linkedInNode = this.inNode.getLinkedInNode(this._inPort);
-
- if (_linkedOutNode == null || _linkedInNode == null) return false;
-
- var validOutLink = (_linkedOutNode.path == this.inNode.path);
- var validInLink = (_linkedInNode.path == this.outNode.path);
-
- if (validOutLink && validInLink){
- this._linked = true;
- return true;
- }
- return false;
- },
-
- set : function(newLinkedStatus){
- if (newLinkedStatus){
- this.connect();
- }else{
- this.disconnect();
- }
- }
-});
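As a hedged sketch of how the linked property might be used, assuming a Drawing and a Composite node exist at the paths below:

    include("openHarmony.js");
    var doc = $.scn;
    var link = new $.oLink(doc.$node("Top/Drawing"), doc.$node("Top/Composite"));
    if (!link.linked) link.linked = true;   // setting the property to true calls connect()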
-
-
-/**
- * Compares the start and end nodes groups to see if the path traverses several groups or not.
- * @name $.oLink#isMultiLevel
- * @readonly
- * @type {bool}
- */
-Object.defineProperty($.oLink.prototype, 'isMultiLevel', {
- get : function(){
- //this.$.debug("isMultiLevel? "+this.outNode +" "+this.inNode, this.$.DEBUG_LEVEL.LOG);
- if (!this.outNode || !this.outNode.group || !this.inNode || !this.inNode.group) return false;
- return this.outNode.group.path != this.inNode.group.path;
- }
-});
-
-
-/**
- * The waypoints attached to this link, as returned by Harmony's waypoint.getAllWaypointsAbove(). Empty if the link isn't connected.
- * @name $.oLink#waypoints
- * @readonly
- */
-Object.defineProperty($.oLink.prototype, 'waypoints', {
- get : function(){
- if (!this.linked) return []
- var _waypoints = waypoint.getAllWaypointsAbove (this.inNode, this.inPort)
- return _waypoints;
- }
-});
-
-
-/**
- * Get a link that can be connected by working out ports that can be used. If a link already exists, it will be returned.
- * @return {$.oLink} A separate $.oLink object that can be connected. Null if none could be constructed.
- */
-$.oLink.prototype.getValidLink = function(createOutPorts, createInPorts){
- if (typeof createOutPorts === 'undefined') var createOutPorts = false;
- if (typeof createInPorts === 'undefined') var createInPorts = true;
- var start = this.outNode;
- var end = this.inNode;
- var outPort = this._outPort;
- var inPort = this._inPort;
-
- if (!start || !end) {
- $.debug("A valid link can't be found: node missing in link "+this.toString(), this.$.DEBUG_LEVEL.ERROR)
- return null;
- }
-
- if (this.isMultiLevel) return null;
-
- var _link = new this.$.oLink(start, end, outPort, inPort);
- _link.findPorts();
-
- // if can't be found, choose a new non existent link
- if (!_link.linked){
- if (typeof outPort === 'undefined' || outPort === undefined){
- _link._outPort = start.getFreeOutPort(createOutPorts);
- // if (_link._outPort == null) _link._outPort = 0; // just use a current port and add a link
- }
-
- _link._outLink = start.getOutLinksNumber(_link._outPort);
-
- if (typeof inPort === 'undefined' || inPort === undefined){
- _link._inPort = end.getFreeInPort(createInPorts);
- if (_link._inPort == null){
- this.$.debug("can't create link because the node "+end+" can't create a free inPort", this.$.DEBUG_LEVEL.ERROR);
- return null; // can't create a valid link.
- }
-
- }else{
- _link._inPort = inPort;
-
- if (end.getInLinksNumber(inPort)!= 0 && !end.canCreateInPorts){
- this.$.debug("can't create link because the requested port "+_link._inPort+" of node "+end+" isn't free", this.$.DEBUG_LEVEL.ERROR);
- return null;
- }
- }
- }
-
- return _link;
-}
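A minimal sketch of using getValidLink to resolve free ports before connecting, under the same assumptions about node paths as the example above:

    var doc = $.scn;
    var link = new $.oLink(doc.$node("Top/Drawing"), doc.$node("Top/Composite"));
    var validLink = link.getValidLink();    // works out usable out/in ports without connecting yet
    if (validLink) validLink.connect();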
-
-
-/**
- * Attempts to connect a link. Will guess the ports if not provided.
- * @return {bool}
- */
-$.oLink.prototype.connect = function(){
- if (this._linked){
- return true;
- }
-
- // do we want to just always get a valid link here or do we want it to fail if not set properly?
- if (!this.findPorts()){
-    var _validLink = this.getValidLink(this.outNode.canCreateOutPorts, this.inNode.canCreateInPorts);
- if (!_validLink) return false;
- this.inPort = _validLink.inPort;
- this.outPort = _validLink.outPort;
- this.outLink = _validLink.outLink;
- };
-
- if (this.inNode.getInLinksNumber(this._inPort) > 0 && !this.inNode.canCreateInPorts) return false; // can't connect if the in-port is already connected
-
- var createOutPorts = (this.outNode.outPorts <= this._outPort && this.outNode.canCreateOutPorts);
- var createInPorts = ((this.inNode.inPorts <= this._inPort || this.inNode.getInLinksNumber(this._inPort)>0) && this.inNode.canCreateInPorts);
-
- if (this._outNode.type == "GROUP" && createOutPorts) this._outNode.addOutPort(this._outPort);
- if (this._inNode.type == "GROUP" && createInPorts) this._inNode.addInPort(this._inPort);
-
- try{
- this.$.debug("linking nodes "+this._outNode+" to "+this._inNode+" through outPort: "+this._outPort+", inPort: "+this._inPort+" and create ports: "+createOutPorts+" "+createInPorts, this.$.DEBUG_LEVEL.LOG);
-
- var success = node.link(this._outNode, this._outPort, this._inNode, this._inPort, createOutPorts, createInPorts);
- this._linked = success;
-
- if (!success) throw new Error();
- return success;
-
- }catch(err){
- this.$.debug("linking nodes "+this._outNode+" to "+this._inNode+" through outPort: "+this._outPort+", inPort: "+this._inPort+", create outports: "+createOutPorts+", create inports:"+createInPorts, this.$.DEBUG_LEVEL.ERROR);
- this.$.debug("Error linking nodes: " +err, this.$.DEBUG_LEVEL.ERROR);
- return false;
- }
-}
-
-
-/**
- * Disconnects a link.
- * @return {bool} Whether disconnecting was successful;
- */
-$.oLink.prototype.disconnect = function(){
- if (!this._linked) return true;
-
- if (!this.findPorts()) return false;
-
- node.unlink(this._inNode, this._inPort);
- this._linked = false;
- return true;
-}
-
-
-/**
- * Finds missing or undefined ports in the link object if it is linked, and updates the object accordingly.
- * This will not update ports if the link isn't connected. Use getValidLink to get a connectable unconnected link.
- * @private
- * @return {bool} Whether finding ports was successful.
- */
-$.oLink.prototype.findPorts = function(){
- // Unless some ports are specified, this will always find the first link and stop there. Provide more info in case of multiple links
-
- if (!this.outNode|| !this.inNode) {
- this.$.debug("calling 'findPorts' for invalid link: "+this.outNode+" > "+this.inNode, this.$.DEBUG_LEVEL.ERROR);
- return false;
- }
-
- if (this._inPort !== undefined && this._outPort!== undefined && this._outLink!== undefined) return true; // ports already are valid, even if link might not be linked
-
- var _inNodePath = this.inNode.path;
- var _outNodePath = this.outNode.path;
-
- // Try to find outPort based on inPort
- // most likely to be missing is outLink, and this is the quickest way to find it.
- if (this._inPort != undefined){
- var _nodeInfo = node.srcNodeInfo(_inNodePath, this._inPort);
- if (_nodeInfo && _nodeInfo.node == _outNodePath && (this._outPort == undefined || this._outPort == _nodeInfo.port)){
- this._outPort = _nodeInfo.port;
- this._outLink = _nodeInfo.link;
- this._linked = true;
-
- // this.$.log("found ports through provided inPort: "+ this._inPort)
- return true;
- }
- }
-
- // Try to find ports based on outLink/outPort
- if (this._outPort !== undefined && this._outLink !== undefined){
- var _nodeInfo = node.dstNodeInfo(_outNodePath, this._outPort, this._outLink);
- if (_nodeInfo && _nodeInfo.node == _inNodePath){
- this._inPort = _nodeInfo.port;
- this._linked = true;
-
- // this.$.log("found ports through provided outPort/outLink: "+this._outPort+" "+this._outLink)
- return true;
- }
- }
-
- // Find the ports if we are missing all of them, looking at in-ports to avoid messing with outLinks
- var _inPorts = this.inNode.inPorts;
- for (var i = 0; i<_inPorts; i++){
- var _nodeInfo = node.srcNodeInfo(_inNodePath, i);
- if (_nodeInfo && _nodeInfo.node == _outNodePath){
- if (this._outPort !== undefined && this._outPort !== _nodeInfo.port) continue;
-
- this._inPort = i;
- this._outPort = _nodeInfo.port;
- this._outLink = _nodeInfo.link;
-
- // this.$.log("found ports through iterations")
- this._linked = true;
-
- return true;
- }
- }
-
- // The nodes are not linked
- this._linked = false;
- return false;
-}
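
Although findPorts() is marked private, it doubles as a quick connectivity test; here is a hedged sketch (the node paths are illustrative, and the temporary link object is discarded afterwards):

    include("openHarmony.js");

    var doc = $.scn;
    var drawing = doc.$node("Top/Drawing");      // illustrative node path
    var composite = doc.$node("Top/Composite");  // illustrative node path

    var link = new $.oLink(drawing, composite);
    if (link.findPorts()){
      // the ports of the existing connection have been filled in on the link object
      System.println("already connected: " + link);
    }
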
-
-
-/**
- * Connects the given node in the middle of the link. The link must be connected.
- * @param {$.oNode} oNode The node to insert in the link
- * @param {int} [nodeInPort = 0] The inPort to use on the inserted node
- * @param {int} [nodeOutPort = 0] The outPort to use on the inserted node
- * @param {int} [nodeOutLink = 0] The outLink to use on the inserted node
- * @return {$.oLink[]} an Array of two oLink objects that describe the new connections.
- * @example
- * include("openHarmony.js")
- * doc = $.scn
- * var node1 = doc.$node("Top/Drawing")
- * var node2 = doc.$node("Top/Composite")
- * var node3 = doc.$node("Top/Transparency")
- *
- * var link = new $.oLink(node1, node2)
- * link.insertNode(node3) // insert the Transparency node between the Drawing and Composite
- */
-$.oLink.prototype.insertNode = function(oNode, nodeInPort, nodeOutPort, nodeOutLink){
- if (!this.linked) return // can't insert a node if the link isn't connected
-
- this.$.beginUndo("oh_insertNode")
-
- var _inNode = this.inNode
- var _outNode = this.outNode
- var _inPort = this.inPort
- var _outPort = this.outPort
- var _outLink = this.outLink
-
- var _topLink = new this.$.oLink(_outNode, oNode, _outPort, nodeInPort, _outLink)
- var _lowerLink = new this.$.oLink(oNode, _inNode, nodeOutPort, _inPort, nodeOutLink)
-
- this.linked = false;
- var success = (_topLink.connect() && _lowerLink.connect());
-
- this.$.endUndo()
-
- if (success) {
- return [_topLink, _lowerLink]
- } else{
- // we restore the links to default state and return false
- this.$.debug("failed to insert node "+oNode+" into link "+this)
- this.$.undo()
- return false
- }
-}
-
-/**
- * Converts the node link to a string.
- * @private
- */
-$.oLink.prototype.toString = function( ) {
- return ('link: {"'+this._outNode+'" ['+this._outPort+', '+this._outLink+'] -> "'+this._inNode+'" ['+this._inPort+']} linked:'+this._linked);
- // return '{outNode:'+this.outNode+' inNode:'+this.inNode+' }';
-}
-
-
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oLinkPath class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-/**
- * Constructor for $.oLinkPath class
- * @classdesc
- * The $.oLinkPath class allows figuring out paths as a series of links between distant nodes.
- * It can either look for existing paths and check that two distant nodes are connected or create new ones that can then be connected.
- * @constructor
- * @param {$.oNode} startNode The first node from which the link is coming out.
- * @param {$.oNode} endNode The last node into which the link is connected.
- * @param {int} [outPort] The out-port of the startNode.
- * @param {int} [inPort] The in-port of the endNode.
- * @param {int} [outLink] The link index coming out of the out-port of the startNode.
- * @see NodeType
- */
-$.oLinkPath = function( startNode, endNode, outPort, inPort, outLink){
- this.startNode = startNode;
- this.endNode = endNode;
- this.outPort = (typeof outPort !== 'undefined')? outPort:undefined;
- this.inPort = (typeof inPort !== 'undefined')? inPort:undefined;
- this.outLink = (typeof outLink !== 'undefined')? outLink:undefined;
-}
-
-
-/**
- * Compares the groups of the start and end nodes to see if the path traverses several groups or not.
- * @name $.oLinkPath#isMultiLevel
- * @readonly
- * @type {bool}
- */
-Object.defineProperty($.oLinkPath.prototype, 'isMultiLevel', {
- get : function(){
- //this.$.log(this.startNode+" "+this.endNode)
- return this.startNode.group.path != this.endNode.group.path;
- }
-});
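
A short sketch of how isMultiLevel might be used before attempting a connection (the node paths are illustrative and assume one node lives inside a group):

    include("openHarmony.js");

    var doc = $.scn;
    var peg = doc.$node("Top/Group/Peg");        // illustrative node path, inside a group
    var composite = doc.$node("Top/Composite");  // illustrative node path, at the top level

    var linkPath = new $.oLinkPath(peg, composite);
    if (linkPath.isMultiLevel){
      System.println("these nodes live in different groups; the link will have to traverse group ports");
    }
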
-
-
-/**
- * Identifies the group in which the two nodes will connect if they are at different levels of depth.
- * @name $.oLinkPath#lowestCommonGroup
- * @readonly
- * @type {$.oGroupNode}
- */
-Object.defineProperty($.oLinkPath.prototype, 'lowestCommonGroup', {
- get : function(){
- var startPath = this.startNode.group.path.split("/");
- var endPath = this.endNode.group.path.split("/");
-
- var commonPath = [];
- for (var i=0; i
- * Alternatively, new preferences can be retrieved with the .get function.
- * @constructor
- * @example
- * var pref = $.getPreferences();
- * pref.create( "MyNewPreferenceName", "MyPreferenceValue" );
- * pref["MyNewPreferenceName"]; // Provides: MyPreferenceValue
- * pref.get("MyNewPreferenceName"); // Provides: MyPreferenceValue
- */
-$.oPreferences = function( ){
- this._type = "preferences";
- this._addedPreferences = []
-
- this.refresh();
-}
-
-
-/**
- * Refreshes the preferences by re-reading the preference file and ingesting their values appropriately. They are then available as properties of this class.
- * Note, any new preferences will not be available as properties until Harmony saves the preference file at exit. In order to reference new preferences, use the get function.
- * @name $.oPreferences#refresh
- * @function
- */
-$.oPreferences.prototype.refresh = function(){
- var fl = specialFolders.userConfig + "/Harmony Premium-pref.xml";
- var nfl = new this.$.oFile( fl );
- if( !nfl.exists ){
- System.println( "Unable to find preference file: " + fl );
- this.$.debug( "Unable to find preference file: " + fl, this.$.DEBUG_LEVEL.ERROR );
- return;
- }
-
- var xmlDom = new QDomDocument();
- xmlDom.setContent( nfl.read() );
-
- if( !xmlDom ){
- return;
- }
-
- var prefXML = xmlDom.elementsByTagName( "preferences" );
- if( prefXML.length() == 0 ){
- this.$.debug( "Unable to find preferences in file: " + fl, this.$.DEBUG_LEVEL.ERROR );
- return;
- }
-
- var XMLpreferences = prefXML.at(0);
-
-  //Clear this object's previous getters/setters to make room for new ones.
-  if( this._preferences ){
-    for( var n in this._preferences ){ //Remove them if they've disappeared.
- Object.defineProperty( this, n, {
- enumerable : false,
- configurable: true,
- set : function(){},
- get : function(){}
- });
- }
- }
- this._preferences = {};
-
- if( !XMLpreferences.hasChildNodes() ){
- this.$.debug( "Unable to find preferences in file: " + fl, this.$.DEBUG_LEVEL.ERROR );
- return;
- }
-
- //THE DEFAULT SETTER
- var set_val = function( pref, name, val ){
- var prefObj = pref._preferences[name];
-
- //Check against types, unable to set types differently.
- switch( typeof val ){
- case 'string':
- if( prefObj["type"] != "string" ){
- throw ReferenceError( "Harmony does not support preference type-changes. Preference must remain " + prefObj["type"] );
- }
- preferences.setString( name, val );
- break;
- case 'number':
- if( prefObj["type"] == "int" ){
- val = Math.floor( val );
- preferences.setInt( name, val );
- }else if( prefObj["type"] == "double" ){
- //This is fine.
- preferences.setDouble( name, val );
- }else{
- throw ReferenceError( "Harmony does not support preference type-changes. Preference must remain " + prefObj["type"] );
- }
- break
- case 'boolean':
- case 'undefined':
- case 'null':
- if( prefObj["type"] != "bool" ){
- throw ReferenceError( "Harmony does not support preference type-changes. Preference must remain " + prefObj["type"] );
- }
- preferences.setBool( name, val ? true:false );
- break
- case 'object':
- default:
- var set = false;
- try{
- if( val.r && val.g && val.b && val.a ){
- if( prefObj["type"] != "color" ){
- throw ReferenceError( "Harmony does not support preference type-changes. Preference must remain " + prefObj["type"] );
- }
-
- value = preferences.setColor( name, new ColorRGBA( val.r, val.g, val.b, val.a ) );
- set = true;
- }
- }catch(err){
- }
-
- if(!set){
- if( prefObj["type"] != "string" ){
- throw ReferenceError( "Harmony does not support preference type-changes. Preference must remain " + prefObj["type"] );
- }
- var json_val = 'json('+JSON.stringify( val )+')';
- preferences.setString( name, json_val );
- }
- break
- }
-
- {
- pref._preferences[name].value = val;
- }
- }
-
- //THE DEFAULT GETTER
- var get_val = function( pref, name ){
- return pref._preferences[name].value;
- }
-
-
-  var getterSetter_create = function( targ, id, type ){
-    var value;
-    switch( type ){
- case 'color':
- var tempVal = preferences.getColor( id, new ColorRGBA () );
- value = new $.oColorValue( tempVal.r, tempVal.g, tempVal.b, tempVal.a );
- break;
- case 'int':
- value = preferences.getInt( id, 0 );
- break
- case 'double':
- value = preferences.getDouble( id, 0.0 );
- break
- case 'bool':
- value = preferences.getBool( id, false );
- break
- case 'string':
- value = preferences.getString( id, "unknown" );
- if( value.slice( 0, 5 ) == "json(" ){
- var obj = value.slice( 5, value.length-1 );
- value = JSON.parse( obj );
- }
- break
- default:
- break;
- }
- if( value === null ) return;
-
- targ._preferences[ id ] = { "value": value, "type":type };
-
- //Create a getter/setter for it!
- Object.defineProperty( targ, id, {
- enumerable : true,
- configurable: true,
- set : eval( 'val = function(val){ set_val( targ, "'+id+'", val ); }' ),
- get : eval( 'val = function(){ return get_val( targ, "'+id+'"); }' )
- });
- }
-
-
- //Get all the children preferences.
- var childNodes = XMLpreferences.childNodes();
- for( var cn=0;cn
- * This is generally useful for accessing newly created preferences that have not been written to disk.
- * @name $.oPreferences#get
- * @param {string} name The name of the preference to retrieve.
- * @example
- * var pref = $.getPreferences();
- * pref.create( "MyNewPreferenceName", "MyPreferenceValue" );
- * //This new preference won't be available in the file until Harmony closes.
- * //So if preferences are reinstantiated, it won't be readily available -- but it can still be retrieved with get.
- *
- * var pref2 = $.getPreferences();
- * pref["MyNewPreferenceName"]; // Provides: undefined -- its not in the Harmony preference file.
- * pref.get("MyNewPreferenceName"); // Provides: MyPreferenceValue, its still available
- */
-$.oPreferences.prototype.get = function( name ){
- if( this[name] ){
- return this[name];
- }
-
- var testTime = (new Date()).getTime();
- var doubleExist = preferences.getDouble( name, testTime );
- if( doubleExist!= testTime ){
- this._addedPreferences.push( {"type":'double', "name":name } );
- this.refresh();
-
- return doubleExist;
- }
-
- var intExist = preferences.getInt( name, testTime );
- if( intExist!= testTime ){
- this._addedPreferences.push( {"type":'int', "name":name } );
- this.refresh();
-
- return intExist;
- }
-
-
- var colorExist = preferences.getColor( name, new ColorRGBA(1,2,3,4) );
- if( !( (colorExist.r==1) && (colorExist.g==2) && (colorExist.b==3) && (colorExist.a==4) ) ){
- this._addedPreferences.push( {"type":'color', "name":name } );
- this.refresh();
-
- return colorExist;
- }
-
- var stringExist = preferences.getString( name, "doesntExist" );
- if( stringExist != "doesntExist" ){
-    this._addedPreferences.push( {"type":'string', "name":name } );
- this.refresh();
-
- return this[name];
- }
-
- return preferences.getBool( name, false );
-}
-
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oPreference class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-/**
- * The constructor for the oPreference Class.
- * @classdesc
- * The oPreference class wraps a single preference item.
- * @constructor
- * @param {string} category The category of the preference
- * @param {string} keyword The keyword used by the preference
- * @param {string} type The type of value held by the preference
- * @param value The default value of the preference
- * @param {string} description A short string of description
- * @param {string} descriptionText The complete tooltip text for the preference
- * @example
- * // To access the preferences of Harmony, grab the preference object in the $.oApp class:
- * var prefs = $.app.preferences;
- *
- * // It's then possible to access all available preferences of the software:
- * for (var i in prefs){
- * log (i+" "+prefs[i]);
- * }
- *
- * // accessing the preference value can be done directly by using the dot notation:
- * prefs.USE_OVERLAY_UNDERLAY_ART = true;
- * log (prefs.USE_OVERLAY_UNDERLAY_ART);
- *
- * //the details objects of the preferences object allows access to more information about each preference
- * var details = prefs.details
- * log(details.USE_OVERLAY_UNDERLAY_ART.category+" "+details.USE_OVERLAY_UNDERLAY_ART.id+" "+details.USE_OVERLAY_UNDERLAY_ART.type);
- *
- * for (var i in details){
- * log(i+" "+JSON.stringify(details[i])) // each object inside detail is a complete oPreference instance
- * }
- *
- * // the preference object also holds a categories array with the list of all categories
- * log (prefs.categories)
- */
-$.oPreference = function(category, keyword, type, value, description, descriptionText){
- this.category = category;
- this.keyword = keyword;
- this.type = type;
- this.description = description;
- this.descriptionText = descriptionText;
- this.defaultValue = value;
-}
-
-
-/**
- * get and set a preference value
- * @name $.oPreference#value
- */
-Object.defineProperty ($.oPreference.prototype, 'value', {
- get: function(){
- try{
- switch(this.type){
- case "bool":
- var _value = preferences.getBool(this.keyword, this.defaultValue);
- break
- case "int":
- var _value = preferences.getInt(this.keyword, this.defaultValue);
- break;
- case "double":
- var _value = preferences.getDouble(this.keyword, this.defaultValue);
- break;
- case "color":
- var _value = preferences.getColor(this.keyword, this.defaultValue);
- _value = new this.$.oColorValue(_value.r, _value.g, _value.b, _value.a)
- break;
- default:
- var _value = preferences.getString(this.keyword, this.defaultValue);
- }
- }catch(err){
- this.$.debug(err, this.$.DEBUG_LEVEL.ERROR)
- }
- this.$.debug("Getting value of Preference "+this.keyword+" : "+_value, this.$.DEBUG_LEVEL.LOG)
- return _value;
- },
-
- set : function(newValue){
- switch(this.type){
- case "bool":
- preferences.setBool(this.keyword, newValue);
- break
- case "int":
- preferences.setInt(this.keyword, newValue);
- break;
- case "double":
- preferences.setDouble(this.keyword, newValue);
- break;
- case "color":
-        if (typeof newValue == "string") newValue = (new this.$.oColorValue()).fromColorString(newValue);
- preferences.setColor(this.keyword, new ColorRGBA(newValue.r, newValue.g, newValue.b, newValue.a));
- break;
- default:
- preferences.setString(this.keyword, newValue);
- }
- this.$.debug("Preference "+this.keyword+" was set to : "+newValue, this.$.DEBUG_LEVEL.LOG)
- }
-})
-
-
-/**
- * Creates getter setters on a simple object for the preference described by the params
- * @private
- * @param {string} category The category of the preference
- * @param {string} keyword The keyword used by the preference
- * @param {string} type The type of value held by the preference
- * @param value The default value of the preference
- * @param {string} description A short string of description
- * @param {string} descriptionText The complete tooltip text for the preference
- * @param {Object} prefObject The preference object that will receive the getter setter property (usually $.oApp._prefObject)
- */
-$.oPreference.createPreference = function(category, keyword, type, value, description, descriptionText, prefObject){
- if (!prefObject.details.hasOwnProperty(keyword)){
- var pref = new $.oPreference(category, keyword, type, value, description, descriptionText);
- Object.defineProperty(prefObject, keyword,{
- enumerable: true,
- get : function(){
- return pref.value;
- },
- set : function(newValue){
- pref.value = newValue;
- }
- })
- }else{
- var pref = prefObject.details[keyword]
- }
-
- return pref;
-}
\ No newline at end of file
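
Based on the check against prefObject.details above, here is a hedged sketch of how createPreference could be used on a plain container object; the preference keyword, its category, and the manual registration into details are assumptions for illustration, not documented behaviour:

    include("openHarmony.js");

    // a plain container standing in for $.oApp._prefObject (hypothetical)
    var prefObject = { details: {} };

    var pref = $.oPreference.createPreference(
      "GENERAL", "MY_CUSTOM_PREF", "bool", false,
      "My custom preference", "A longer tooltip text", prefObject);

    prefObject.details["MY_CUSTOM_PREF"] = pref; // presumably registered by the caller
    prefObject.MY_CUSTOM_PREF = true;            // goes through the generated setter and writes the preference
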
diff --git a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_scene.js b/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_scene.js
deleted file mode 100644
index 8100df3a1c..0000000000
--- a/server_addon/harmony/client/ayon_harmony/vendor/OpenHarmony/openHarmony/openHarmony_scene.js
+++ /dev/null
@@ -1,2345 +0,0 @@
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-//
-// openHarmony Library
-//
-//
-//  Developed by Mathieu Chaptel, Chris Fourney
-//
-//
-// This library is an open source implementation of a Document Object Model
-// for Toonboom Harmony. It also implements patterns similar to JQuery
-// for traversing this DOM.
-//
-// Its intended purpose is to simplify and streamline toonboom scripting to
-//  empower users and be easy on newcomers, with default parameter values,
-// and by hiding the heavy lifting required by the official API.
-//
-// This library is provided as is and is a work in progress. As such, not every
-//  function has been implemented or is guaranteed to work. Feel free to contribute
-//  improvements to its official github. If you do, make sure you follow the provided
-// template and naming conventions and document your new methods properly.
-//
-// This library doesn't overwrite any of the objects and classes of the official
-//  Toonboom API, which must remain available.
-//
-// This library is made available under the Mozilla Public license 2.0.
-// https://www.mozilla.org/en-US/MPL/2.0/
-//
-// The repository for this library is available at the address:
-// https://github.com/cfourney/OpenHarmony/
-//
-//
-// For any requests feel free to contact m.chaptel@gmail.com
-//
-//
-//
-//
-//////////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////////////////////////////////////////////////////////
-
-//////////////////////////////////////
-//////////////////////////////////////
-// //
-// //
-// $.oScene class //
-// //
-// //
-//////////////////////////////////////
-//////////////////////////////////////
-
-
-//TODO: Metadata, settings, aspect, camera peg, view.
-/**
- * The constructor for $.oScene.
- * @classdesc
- * The base class to access all the contents of the scene and add elements.
- * This is the main class for exporting operations as well as column/element/palette creation.
- * @constructor
- * @example
- * // Access to the direct dom object. Available and automatically instantiated as $.getScene, $.scene, $.scn, $.s
- * var doc = $.getScene ;
- * var doc = $.scn ;
- * var doc = $.s ; // all these are equivalents
- *
- * // To grab the scene from a QWidget Dialog callback, store the $ object in a local variable to access all the functions from the library.
- * function myCallBackFunction(){
- *   this.$ = $;
- *
- * var doc = this.$.scn;
- * }
- *
- *
- */
-$.oScene = function( ){
- // $.oScene.nodes property is a class property shared by all instances, so it can be passed by reference and always contain all nodes in the scene
-
- //var _topNode = new this.$.oNode("Top");
- //this.__proto__.nodes = _topNode.subNodes(true);
-
- this._type = "scene";
-}
-
-
-//-------------------------------------------------------------------------------------
-//--- $.oScene Objects Properties
-//-------------------------------------------------------------------------------------
-//-------------------------------------------------------------------------------------
-
-
-/**
- * The folder that contains this scene.
- * @name $.oScene#path
- * @type {$.oFolder}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'path', {
- get : function(){
- return new this.$.oFolder( scene.currentProjectPathRemapped() );
- }
-});
-
-/**
- * The stage file of the scene.
- * @name $.oScene#stage
- * @type {$.oFile}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'stage', {
- get : function(){
- if (this.online) return this.path + "/stage/" + this.name + ".stage";
- return this.path + "/" + this.version + ".xstage";
- }
-});
-
-/**
- * The folder that contains the palettes of this scene.
- * @name $.oScene#paletteFolder
- * @type {$.oFolder}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'paletteFolder', {
- get : function(){
- return new this.$.oFolder( this.path+"/palette-library" );
- }
-});
-
-
-/**
- * The temporary folder where files are created before being saved.
- * If the folder doesn't exist yet, it will be created.
- * @name $.oScene#tempFolder
- * @type {$.oFolder}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'tempFolder', {
- get : function(){
- if (!this.hasOwnProperty("_tempFolder")){
- this._tempFolder = new this.$.oFolder(scene.tempProjectPathRemapped());
- if (!this._tempFolder.exists) this._tempFolder.create()
- }
- return this._tempFolder;
- }
-});
-
-/**
- * The name of the scene.
- * @name $.oScene#name
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oScene.prototype, 'name', {
- get : function(){
- return scene.currentScene();
- }
-});
-
-
-/**
- * Whether the scene is hosted on a Toonboom database.
- * @name $.oScene#online
- * @readonly
- * @type {bool}
- */
-Object.defineProperty($.oScene.prototype, 'online', {
- get : function(){
- return about.isDatabaseMode()
- }
-});
-
-/**
- * The name of the environment the scene belongs to (only when the scene is hosted on a Toonboom database).
- * @name $.oScene#environnement
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oScene.prototype, 'environnement', {
- get : function(){
- if (!this.online) return null;
- return scene.currentEnvironment();
- }
-});
-
-
-/**
- * The name of the job the scene belongs to (only when the scene is hosted on a Toonboom database).
- * @name $.oScene#job
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oScene.prototype, 'job', {
- get : function(){
- if (!this.online) return null;
- return scene.currentJob();
- }
-});
-
-
-/**
- * The name of the current version of the scene.
- * @name $.oScene#version
- * @readonly
- * @type {string}
- */
-Object.defineProperty($.oScene.prototype, 'version', {
- get : function(){
- return scene.currentVersionName();
- }
-});
-
-
-/**
- * The name of the scene.
- * @deprecated Use $.oScene#name instead.
- * @readonly
- * @name $.oScene#sceneName
- * @type {string}
- */
-Object.defineProperty($.oScene.prototype, 'sceneName', {
- get : function(){
- return this.name;
- }
-});
-
-
-
-/**
- * The start frame of the scene playback range.
- * @name $.oScene#startPreview
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'startPreview', {
- get : function(){
- return scene.getStartFrame();
- },
- set : function(val){
- scene.setStartFrame( val );
- }
-});
-
-/**
- * The stop frame of the scene playback range.
- * @name $.oScene#stopPreview
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'stopPreview', {
- get : function(){
- return scene.getStopFrame()+1;
- },
- set : function(val){
- scene.setStopFrame( val-1 );
- }
-});
-
-/**
- * The frame rate of the scene.
- * @name $.oScene#framerate
- * @type {float}
- */
-Object.defineProperty($.oScene.prototype, 'framerate', {
- get : function(){
- return scene.getFrameRate();
- },
- set : function(val){
- return scene.setFrameRate( val );
- }
-});
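
The playback properties above can be combined into a one-line report; a read-only sketch that should be safe on any open scene:

    include("openHarmony.js");

    var doc = $.scn;
    System.println(doc.name + " plays frames " + doc.startPreview + " to " + doc.stopPreview
      + " at " + doc.framerate + " fps");
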
-
-
-/**
- * The Field unit aspect ratio as a coefficient (width/height).
- * @name $.oScene#unitsAspectRatio
- * @type {double}
- */
- Object.defineProperty($.oScene.prototype, 'unitsAspectRatio', {
- get : function(){
- return this.aspectRatioX/this.aspectRatioY;
- }
-});
-
-
-/**
- * The horizontal aspect ratio of Field units.
- * @name $.oScene#aspectRatioX
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'aspectRatioX', {
- get : function(){
- return scene.unitsAspectRatioX();
- },
- set : function(val){
- scene.setUnitsAspectRatio( val, this.aspectRatioY );
- }
-});
-
-/**
- * The vertical aspect ratio of Field units.
- * @name $.oScene#aspectRatioY
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'aspectRatioY', {
- get : function(){
- return scene.unitsAspectRatioY();
- },
- set : function(val){
-    scene.setUnitsAspectRatio( this.aspectRatioX, val );
- }
-});
-
-/**
- * The horizontal Field unit count.
- * @name $.oScene#unitsX
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'unitsX', {
- get : function(){
- return scene.numberOfUnitsX();
- },
- set : function(val){
- scene.setNumberOfUnits( val, this.unitsY, this.unitsZ );
- }
-});
-
-/**
- * The vertical Field unit count.
- * @name $.oScene#unitsY
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'unitsY', {
- get : function(){
- return scene.numberOfUnitsY();
- },
- set : function(val){
- scene.setNumberOfUnits( this.unitsX, val, this.unitsZ );
- }
-});
-
-/**
- * The depth Field unit count.
- * @name $.oScene#unitsZ
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'unitsZ', {
- get : function(){
- return scene.numberOfUnitsZ();
- },
- set : function(val){
- scene.setNumberOfUnits( this.unitsX, this.unitsY, val );
- }
-});
-
-
-/**
- * The center coordinates of the scene.
- * @name $.oScene#center
- * @type {$.oPoint}
- */
-Object.defineProperty($.oScene.prototype, 'center', {
- get : function(){
- return new this.$.oPoint( scene.coordAtCenterX(), scene.coordAtCenterY(), 0.0 );
- },
- set : function( val ){
- scene.setCoordAtCenter( val.x, val.y );
- }
-});
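
A brief sketch of reading and resetting the scene center; it assumes $.oPoint is reachable on the global $ object, as the other classes are:

    include("openHarmony.js");

    var doc = $.scn;
    System.println("scene center: " + doc.center.x + ", " + doc.center.y);

    // move the coordinate system center back to the origin
    doc.center = new $.oPoint(0, 0, 0);
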
-
-
-/**
- * The amount of drawing units represented by 1 field on the horizontal axis.
- * @name $.oScene#fieldVectorResolutionX
- * @type {double}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'fieldVectorResolutionX', {
- get : function(){
- var yUnit = this.fieldVectorResolutionY;
- var unit = yUnit * this.unitsAspectRatio;
- return unit
- }
-});
-
-
-/**
- * The amount of drawing units represented by 1 field on the vertical axis.
- * @name $.oScene#fieldVectorResolutionY
- * @type {double}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'fieldVectorResolutionY', {
- get : function(){
- var verticalResolution = 1875 // the amount of drawing units for the max vertical field value
- var unit = verticalResolution/12; // the vertical number of units on drawings is always 12 regardless of $.scn.unitsY
- return unit
- }
-});
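
As a worked example of the two properties above, converting a position given in fields into drawing units (the field values 4 and 3 are arbitrary; the vertical factor is 1875/12 = 156.25 regardless of the scene settings, while the horizontal factor depends on the units aspect ratio):

    include("openHarmony.js");

    var doc = $.scn;
    var xDrawingUnits = 4 * doc.fieldVectorResolutionX;
    var yDrawingUnits = 3 * doc.fieldVectorResolutionY;  // 3 * 156.25 = 468.75 drawing units
    System.println("drawing units: " + xDrawingUnits + ", " + yDrawingUnits);
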
-
-
-/**
- * The horizontal resolution in pixels (for rendering).
- * @name $.oScene#resolutionX
- * @readonly
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'resolutionX', {
- get : function(){
- return scene.currentResolutionX();
- }
-});
-
-/**
- * The vertical resolution in pixels (for rendering).
- * @name $.oScene#resolutionY
- * @readonly
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'resolutionY', {
- get : function(){
- return scene.currentResolutionY();
- }
-});
-
-/**
- * The default horizontal resolution in pixels.
- * @name $.oScene#defaultResolutionX
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'defaultResolutionX', {
- get : function(){
- return scene.defaultResolutionX();
- },
- set : function(val){
- scene.setDefaultResolution( val, this.defaultResolutionY, this.fov );
- }
-});
-
-/**
- * The default vertical resolution in pixels.
- * @name $.oScene#defaultResolutionY
- * @type {int}
- */
-Object.defineProperty($.oScene.prototype, 'defaultResolutionY', {
- get : function(){
- return scene.defaultResolutionY();
- },
- set : function(val){
- scene.setDefaultResolution( this.defaultResolutionX, val, this.fov );
- }
-});
-
-/**
- * The field of view of the scene.
- * @name $.oScene#fov
- * @type {double}
- */
-Object.defineProperty($.oScene.prototype, 'fov', {
- get : function(){
- return scene.defaultResolutionFOV();
- },
- set : function(val){
- scene.setDefaultResolution( this.defaultResolutionX, this.defaultResolutionY, val );
- }
-});
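
A read-only sketch printing the current and default render settings exposed above:

    include("openHarmony.js");

    var doc = $.scn;
    System.println("current render resolution: " + doc.resolutionX + "x" + doc.resolutionY);
    System.println("default resolution: " + doc.defaultResolutionX + "x" + doc.defaultResolutionY
      + " (fov " + doc.fov + ")");
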
-
-
-/**
- * The default Display of the scene.
- * @name $.oScene#defaultDisplay
- * @type {$.oNode}
- */
-Object.defineProperty($.oScene.prototype, 'defaultDisplay', {
- get : function(){
- return this.getNodeByPath(scene.getDefaultDisplay());
- },
-
- set : function(newDisplay){
- node.setAsGlobalDisplay(newDisplay.path);
- }
-});
-
-
-/**
- * Whether the scene contains unsaved changes.
- * @name $.oScene#unsaved
- * @readonly
- * @type {bool}
- */
-Object.defineProperty($.oScene.prototype, 'unsaved', {
- get : function(){
- return scene.isDirty();
- }
-});
-
-
-/**
- * The root group of the scene.
- * @name $.oScene#root
- * @type {$.oGroupNode}
- * @readonly
- */
-Object.defineProperty($.oScene.prototype, 'root', {
- get : function(){
- var _topNode = this.getNodeByPath( "Top" );
- return _topNode
- }
-});
-
-
-/**
- * Contains the list of all the nodes present in the scene.
- * @name $.oScene#nodes
- * @readonly
- * @type {$.oNode[]}
- */
-Object.defineProperty($.oScene.prototype, 'nodes', {
- get : function(){
- var _topNode = this.root;
- return _topNode.subNodes( true );
- }
-});
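
A small sketch iterating the nodes property (it only reads the node list, so it should be harmless on any scene):

    include("openHarmony.js");

    var doc = $.scn;
    var sceneNodes = doc.nodes;  // every node of the scene, including nodes inside groups
    for (var i = 0; i < sceneNodes.length; i++){
      System.println(sceneNodes[i].path + " (" + sceneNodes[i].type + ")");
    }
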
-
-
-/**
- * Contains the list of columns present in the scene.
- * @name $.oScene#columns
- * @readonly
- * @type {$.oColumn[]}
- * @todo add attribute finding to get complete column objects
- */
-Object.defineProperty($.oScene.prototype, 'columns', {
- get : function(){
- var _columns = [];
- for (var i=0; i
The complete documentation is available at this address:https://cfourney.github.io/OpenHarmony/
Install this library to be able to use the scripts that require it.
", - "repository": "https://github.com/cfourney/OpenHarmony/", - "isPackage": false, - "files": [ - "openHarmony.js", - "openHarmony/" - ], - "keywords": [ - "openHarmony", - "library" - ], - "author": "OpenHarmony", - "license": "MPL-2.0", - "website": "https://github.com/cfourney/OpenHarmony/", - "localFiles": "" - }, - { - "name": "oH Anim tools - Smartkey", - "version": "1.0.0", - "compatibility": "Harmony Premium 15", - "description": "This script creates a key frame on the current timeline layer, and set the stop motion or interpolated mode of the key according to the surrounding keyframes. A Keyframe placed on an interpolation will remain interpolated, and a key placed between stop motion keyframes will also be set to stop motion.\nThis script uses the OpenHarmony library. Install it first to be able to use it.
\n\nAssign this script to a shortcut with the script ScriptShortcuts.
", - "repository": "https://github.com/cfourney/OpenHarmony/", - "isPackage": false, - "files": [ - "tools/OpenHarmony_basic/openHarmony_anim_tools.js" - ], - "keywords": [ - "openHarmony", - "animation" - ], - "author": "Chris F", - "license": "MPL-v2.0", - "website": "https://github.com/cfourney/OpenHarmony/", - "localFiles": "" - }, - { - "name": "oH Rigging tools", - "version": "1.0.0", - "compatibility": "Harmony Premium 15", - "description": "OpenHarmony Rigging Tools\nThose scripts require the openHarmony lib to work. Install it first for the scripts to work.
\nAdd Centered Weighted Peg
Adds a peg with a pivot at the center of the selected drawing.
Place Pivot with Click
Place the pivot with a simple click.
Clean Unused Palettes
\nFinds and removes all unnecessary palettes files from the filesystem. Doesn't support Element Palettes yet!
Create Backdrop on Selection
Set up backdrops easily on the selection with this script.
end: - raise _DecodeError('Truncated message.') - return pos - - -def _SkipGroup(buffer, pos, end): - """Skip sub-group. Returns the new position.""" - - while 1: - (tag_bytes, pos) = ReadTag(buffer, pos) - new_pos = SkipField(buffer, pos, end, tag_bytes) - if new_pos == -1: - return pos - pos = new_pos - - -def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): - """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.""" - - unknown_field_set = containers.UnknownFieldSet() - while end_pos is None or pos < end_pos: - (tag_bytes, pos) = ReadTag(buffer, pos) - (tag, _) = _DecodeVarint(tag_bytes, 0) - field_number, wire_type = wire_format.UnpackTag(tag) - if wire_type == wire_format.WIRETYPE_END_GROUP: - break - (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) - # pylint: disable=protected-access - unknown_field_set._add(field_number, wire_type, data) - - return (unknown_field_set, pos) - - -def _DecodeUnknownField(buffer, pos, wire_type): - """Decode a unknown field. Returns the UnknownField and new position.""" - - if wire_type == wire_format.WIRETYPE_VARINT: - (data, pos) = _DecodeVarint(buffer, pos) - elif wire_type == wire_format.WIRETYPE_FIXED64: - (data, pos) = _DecodeFixed64(buffer, pos) - elif wire_type == wire_format.WIRETYPE_FIXED32: - (data, pos) = _DecodeFixed32(buffer, pos) - elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: - (size, pos) = _DecodeVarint(buffer, pos) - data = buffer[pos:pos+size].tobytes() - pos += size - elif wire_type == wire_format.WIRETYPE_START_GROUP: - (data, pos) = _DecodeUnknownFieldSet(buffer, pos) - elif wire_type == wire_format.WIRETYPE_END_GROUP: - return (0, -1) - else: - raise _DecodeError('Wrong wire type in tag.') - - return (data, pos) - - -def _EndGroup(buffer, pos, end): - """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" - - return -1 - - -def _SkipFixed32(buffer, pos, end): - """Skip a fixed32 value. Returns the new position.""" - - pos += 4 - if pos > end: - raise _DecodeError('Truncated message.') - return pos - - -def _DecodeFixed32(buffer, pos): - """Decode a fixed32.""" - - new_pos = pos + 4 - return (struct.unpack('B').pack - - def EncodeVarint(write, value, unused_deterministic=None): - bits = value & 0x7f - value >>= 7 - while value: - write(local_int2byte(0x80|bits)) - bits = value & 0x7f - value >>= 7 - return write(local_int2byte(bits)) - - return EncodeVarint - - -def _SignedVarintEncoder(): - """Return an encoder for a basic signed varint value (does not include - tag).""" - - local_int2byte = struct.Struct('>B').pack - - def EncodeSignedVarint(write, value, unused_deterministic=None): - if value < 0: - value += (1 << 64) - bits = value & 0x7f - value >>= 7 - while value: - write(local_int2byte(0x80|bits)) - bits = value & 0x7f - value >>= 7 - return write(local_int2byte(bits)) - - return EncodeSignedVarint - - -_EncodeVarint = _VarintEncoder() -_EncodeSignedVarint = _SignedVarintEncoder() - - -def _VarintBytes(value): - """Encode the given integer as a varint and return the bytes. This is only - called at startup time so it doesn't need to be fast.""" - - pieces = [] - _EncodeVarint(pieces.append, value, True) - return b"".join(pieces) - - -def TagBytes(field_number, wire_type): - """Encode the given tag and return the bytes. 
Only called at startup.""" - - return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) - -# -------------------------------------------------------------------- -# As with sizers (see above), we have a number of common encoder -# implementations. - - -def _SimpleEncoder(wire_type, encode_value, compute_value_size): - """Return a constructor for an encoder for fields of a particular type. - - Args: - wire_type: The field's wire type, for encoding tags. - encode_value: A function which encodes an individual value, e.g. - _EncodeVarint(). - compute_value_size: A function which computes the size of an individual - value, e.g. _VarintSize(). - """ - - def SpecificEncoder(field_number, is_repeated, is_packed): - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - size = 0 - for element in value: - size += compute_value_size(element) - local_EncodeVarint(write, size, deterministic) - for element in value: - encode_value(write, element, deterministic) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, deterministic): - for element in value: - write(tag_bytes) - encode_value(write, element, deterministic) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, deterministic): - write(tag_bytes) - return encode_value(write, value, deterministic) - return EncodeField - - return SpecificEncoder - - -def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): - """Like SimpleEncoder but additionally invokes modify_value on every value - before passing it to encode_value. Usually modify_value is ZigZagEncode.""" - - def SpecificEncoder(field_number, is_repeated, is_packed): - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - size = 0 - for element in value: - size += compute_value_size(modify_value(element)) - local_EncodeVarint(write, size, deterministic) - for element in value: - encode_value(write, modify_value(element), deterministic) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, deterministic): - for element in value: - write(tag_bytes) - encode_value(write, modify_value(element), deterministic) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, deterministic): - write(tag_bytes) - return encode_value(write, modify_value(value), deterministic) - return EncodeField - - return SpecificEncoder - - -def _StructPackEncoder(wire_type, format): - """Return a constructor for an encoder for a fixed-width field. - - Args: - wire_type: The field's wire type, for encoding tags. - format: The format string to pass to struct.pack(). 
- """ - - value_size = struct.calcsize(format) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - write(local_struct_pack(format, element)) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, unused_deterministic=None): - for element in value: - write(tag_bytes) - write(local_struct_pack(format, element)) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, unused_deterministic=None): - write(tag_bytes) - return write(local_struct_pack(format, value)) - return EncodeField - - return SpecificEncoder - - -def _FloatingPointEncoder(wire_type, format): - """Return a constructor for an encoder for float fields. - - This is like StructPackEncoder, but catches errors that may be due to - passing non-finite floating-point values to struct.pack, and makes a - second attempt to encode those values. - - Args: - wire_type: The field's wire type, for encoding tags. - format: The format string to pass to struct.pack(). - """ - - value_size = struct.calcsize(format) - if value_size == 4: - def EncodeNonFiniteOrRaise(write, value): - # Remember that the serialized form uses little-endian byte order. - if value == _POS_INF: - write(b'\x00\x00\x80\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x80\xFF') - elif value != value: # NaN - write(b'\x00\x00\xC0\x7F') - else: - raise - elif value_size == 8: - def EncodeNonFiniteOrRaise(write, value): - if value == _POS_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') - elif value != value: # NaN - write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') - else: - raise - else: - raise ValueError('Can\'t encode floating-point values that are ' - '%d bytes long (only 4 or 8)' % value_size) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - # This try/except block is going to be faster than any code that - # we could write to check whether element is finite. 
- try: - write(local_struct_pack(format, element)) - except SystemError: - EncodeNonFiniteOrRaise(write, element) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, unused_deterministic=None): - for element in value: - write(tag_bytes) - try: - write(local_struct_pack(format, element)) - except SystemError: - EncodeNonFiniteOrRaise(write, element) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, unused_deterministic=None): - write(tag_bytes) - try: - write(local_struct_pack(format, value)) - except SystemError: - EncodeNonFiniteOrRaise(write, value) - return EncodeField - - return SpecificEncoder - - -# ==================================================================== -# Here we declare an encoder constructor for each field type. These work -# very similarly to sizer constructors, described earlier. - - -Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( - wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) - -UInt32Encoder = UInt64Encoder = _SimpleEncoder( - wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) - -SInt32Encoder = SInt64Encoder = _ModifiedEncoder( - wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, - wire_format.ZigZagEncode) - -# Note that Python conveniently guarantees that when using the '<' prefix on -# formats, they will also have the same size across all platforms (as opposed -# to without the prefix, where their sizes depend on the C compiler's basic -# type sizes). -Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str - ValueType = int - - def __init__(self, enum_type): - """Inits EnumTypeWrapper with an EnumDescriptor.""" - self._enum_type = enum_type - self.DESCRIPTOR = enum_type # pylint: disable=invalid-name - - def Name(self, number): # pylint: disable=invalid-name - """Returns a string containing the name of an enum value.""" - try: - return self._enum_type.values_by_number[number].name - except KeyError: - pass # fall out to break exception chaining - - if not isinstance(number, int): - raise TypeError( - 'Enum value for {} must be an int, but got {} {!r}.'.format( - self._enum_type.name, type(number), number)) - else: - # repr here to handle the odd case when you pass in a boolean. - raise ValueError('Enum {} has no name defined for value {!r}'.format( - self._enum_type.name, number)) - - def Value(self, name): # pylint: disable=invalid-name - """Returns the value corresponding to the given enum name.""" - try: - return self._enum_type.values_by_name[name].number - except KeyError: - pass # fall out to break exception chaining - raise ValueError('Enum {} has no value defined for name {!r}'.format( - self._enum_type.name, name)) - - def keys(self): - """Return a list of the string names in the enum. - - Returns: - A list of strs, in the order they were defined in the .proto file. - """ - - return [value_descriptor.name - for value_descriptor in self._enum_type.values] - - def values(self): - """Return a list of the integer values in the enum. - - Returns: - A list of ints, in the order they were defined in the .proto file. - """ - - return [value_descriptor.number - for value_descriptor in self._enum_type.values] - - def items(self): - """Return a list of the (name, value) pairs of the enum. - - Returns: - A list of (str, int) pairs, in the order they were defined - in the .proto file. 
- """ - return [(value_descriptor.name, value_descriptor.number) - for value_descriptor in self._enum_type.values] - - def __getattr__(self, name): - """Returns the value corresponding to the given enum name.""" - try: - return super( - EnumTypeWrapper, - self).__getattribute__('_enum_type').values_by_name[name].number - except KeyError: - pass # fall out to break exception chaining - raise AttributeError('Enum {} has no value defined for name {!r}'.format( - self._enum_type.name, name)) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/extension_dict.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/extension_dict.py deleted file mode 100644 index b346cf283e..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/extension_dict.py +++ /dev/null @@ -1,213 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains _ExtensionDict class to represent extensions. -""" - -from google.protobuf.internal import type_checkers -from google.protobuf.descriptor import FieldDescriptor - - -def _VerifyExtensionHandle(message, extension_handle): - """Verify that the given extension handle is valid.""" - - if not isinstance(extension_handle, FieldDescriptor): - raise KeyError('HasExtension() expects an extension handle, got: %s' % - extension_handle) - - if not extension_handle.is_extension: - raise KeyError('"%s" is not an extension.' % extension_handle.full_name) - - if not extension_handle.containing_type: - raise KeyError('"%s" is missing a containing_type.' - % extension_handle.full_name) - - if extension_handle.containing_type is not message.DESCRIPTOR: - raise KeyError('Extension "%s" extends message type "%s", but this ' - 'message is of type "%s".' % - (extension_handle.full_name, - extension_handle.containing_type.full_name, - message.DESCRIPTOR.full_name)) - - -# TODO(robinson): Unify error handling of "unknown extension" crap. 
-# TODO(robinson): Support iteritems()-style iteration over all -# extensions with the "has" bits turned on? -class _ExtensionDict(object): - - """Dict-like container for Extension fields on proto instances. - - Note that in all cases we expect extension handles to be - FieldDescriptors. - """ - - def __init__(self, extended_message): - """ - Args: - extended_message: Message instance for which we are the Extensions dict. - """ - self._extended_message = extended_message - - def __getitem__(self, extension_handle): - """Returns the current value of the given extension handle.""" - - _VerifyExtensionHandle(self._extended_message, extension_handle) - - result = self._extended_message._fields.get(extension_handle) - if result is not None: - return result - - if extension_handle.label == FieldDescriptor.LABEL_REPEATED: - result = extension_handle._default_constructor(self._extended_message) - elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - message_type = extension_handle.message_type - if not hasattr(message_type, '_concrete_class'): - # pylint: disable=protected-access - self._extended_message._FACTORY.GetPrototype(message_type) - assert getattr(extension_handle.message_type, '_concrete_class', None), ( - 'Uninitialized concrete class found for field %r (message type %r)' - % (extension_handle.full_name, - extension_handle.message_type.full_name)) - result = extension_handle.message_type._concrete_class() - try: - result._SetListener(self._extended_message._listener_for_children) - except ReferenceError: - pass - else: - # Singular scalar -- just return the default without inserting into the - # dict. - return extension_handle.default_value - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - result = self._extended_message._fields.setdefault( - extension_handle, result) - - return result - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - - my_fields = self._extended_message.ListFields() - other_fields = other._extended_message.ListFields() - - # Get rid of non-extension fields. - my_fields = [field for field in my_fields if field.is_extension] - other_fields = [field for field in other_fields if field.is_extension] - - return my_fields == other_fields - - def __ne__(self, other): - return not self == other - - def __len__(self): - fields = self._extended_message.ListFields() - # Get rid of non-extension fields. - extension_fields = [field for field in fields if field[0].is_extension] - return len(extension_fields) - - def __hash__(self): - raise TypeError('unhashable object') - - # Note that this is only meaningful for non-repeated, scalar extension - # fields. Note also that we may have to call _Modified() when we do - # successfully set a field this way, to set any necessary "has" bits in the - # ancestors of the extended message. - def __setitem__(self, extension_handle, value): - """If extension_handle specifies a non-repeated, scalar extension - field, sets the value of that field. 
- """ - - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or - extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): - raise TypeError( - 'Cannot assign to extension "%s" because it is a repeated or ' - 'composite type.' % extension_handle.full_name) - - # It's slightly wasteful to lookup the type checker each time, - # but we expect this to be a vanishingly uncommon case anyway. - type_checker = type_checkers.GetTypeChecker(extension_handle) - # pylint: disable=protected-access - self._extended_message._fields[extension_handle] = ( - type_checker.CheckValue(value)) - self._extended_message._Modified() - - def __delitem__(self, extension_handle): - self._extended_message.ClearExtension(extension_handle) - - def _FindExtensionByName(self, name): - """Tries to find a known extension with the specified name. - - Args: - name: Extension full name. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_name.get(name, None) - - def _FindExtensionByNumber(self, number): - """Tries to find a known extension with the field number. - - Args: - number: Extension field number. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_number.get(number, None) - - def __iter__(self): - # Return a generator over the populated extension fields - return (f[0] for f in self._extended_message.ListFields() - if f[0].is_extension) - - def __contains__(self, extension_handle): - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if extension_handle not in self._extended_message._fields: - return False - - if extension_handle.label == FieldDescriptor.LABEL_REPEATED: - return bool(self._extended_message._fields.get(extension_handle)) - - if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - value = self._extended_message._fields.get(extension_handle) - # pylint: disable=protected-access - return value is not None and value._is_present_in_parent - - return True diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_listener.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_listener.py deleted file mode 100644 index 0fc255a774..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_listener.py +++ /dev/null @@ -1,78 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Defines a listener interface for observing certain -state transitions on Message objects. - -Also defines a null implementation of this interface. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -class MessageListener(object): - - """Listens for modifications made to a message. Meant to be registered via - Message._SetListener(). - - Attributes: - dirty: If True, then calling Modified() would be a no-op. This can be - used to avoid these calls entirely in the common case. - """ - - def Modified(self): - """Called every time the message is modified in such a way that the parent - message may need to be updated. This currently means either: - (a) The message was modified for the first time, so the parent message - should henceforth mark the message as present. - (b) The message's cached byte size became dirty -- i.e. the message was - modified for the first time after a previous call to ByteSize(). - Therefore the parent should also mark its byte size as dirty. - Note that (a) implies (b), since new objects start out with a client cached - size (zero). However, we document (a) explicitly because it is important. - - Modified() will *only* be called in response to one of these two events -- - not every time the sub-message is modified. - - Note that if the listener's |dirty| attribute is true, then calling - Modified at the moment would be a no-op, so it can be skipped. Performance- - sensitive callers should check this attribute directly before calling since - it will be true most of the time. - """ - - raise NotImplementedError - - -class NullMessageListener(object): - - """No-op MessageListener implementation.""" - - def Modified(self): - pass diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py deleted file mode 100644 index 63651a3f19..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/internal/message_set_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageSet.RegisterExtension(message_set_extension3) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) - - DESCRIPTOR._options = None - _TESTMESSAGESET._options = None - _TESTMESSAGESET._serialized_options = b'\010\001' - _TESTMESSAGESET._serialized_start=83 - _TESTMESSAGESET._serialized_end=113 - _TESTMESSAGESETEXTENSION1._serialized_start=116 - _TESTMESSAGESETEXTENSION1._serialized_end=281 - _TESTMESSAGESETEXTENSION2._serialized_start=284 - _TESTMESSAGESETEXTENSION2._serialized_end=451 - _TESTMESSAGESETEXTENSION3._serialized_start=453 - _TESTMESSAGESETEXTENSION3._serialized_end=493 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py deleted file mode 100644 index 5497083197..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/missing_enum_values.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTENUMVALUES._serialized_start=88 - _TESTENUMVALUES._serialized_end=409 - _TESTENUMVALUES_NESTEDENUM._serialized_start=378 - _TESTENUMVALUES_NESTEDENUM._serialized_end=409 - _TESTMISSINGENUMVALUES._serialized_start=412 - _TESTMISSINGENUMVALUES._serialized_end=751 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 - _JUSTSTRING._serialized_start=753 - _JUSTSTRING._serialized_end=780 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py deleted file mode 100644 index 0953706bac..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_extensions_dynamic.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) - - DESCRIPTOR._options = None - _DYNAMICMESSAGETYPE._serialized_start=132 - _DYNAMICMESSAGETYPE._serialized_end=163 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_pb2.py deleted file mode 100644 index 1cfa1b7c8b..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_extensions_pb2.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
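Each of the deleted `*_pb2.py` modules follows the same import-time recipe: register the serialized `FileDescriptorProto` with the default descriptor pool, then let the builder inject message classes into the module namespace. A rough illustration of that recipe with an in-process descriptor instead of a pasted byte literal (file, package, and message names here are invented; the two builder calls are the same ones the generated modules make):

    from google.protobuf import descriptor_pb2, descriptor_pool
    from google.protobuf.internal import builder as _builder

    file_proto = descriptor_pb2.FileDescriptorProto(
        name='example/ping.proto', package='example')
    msg = file_proto.message_type.add(name='Ping')
    msg.field.add(name='count', number=1,
                  type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
                  label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)

    # Same two calls as the generated modules, minus the hard-coded bytes.
    DESCRIPTOR = descriptor_pool.Default().AddSerializedFile(
        file_proto.SerializeToString())
    _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
    _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'example.ping_pb2', globals())

    ping = globals()['Ping'](count=2)   # the builder injected the Ping class above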
-# source: google/protobuf/internal/more_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - ExtendedMessage.RegisterExtension(optional_int_extension) - ExtendedMessage.RegisterExtension(optional_message_extension) - ExtendedMessage.RegisterExtension(repeated_int_extension) - ExtendedMessage.RegisterExtension(repeated_message_extension) - - DESCRIPTOR._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None - _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' - _NESTEDMESSAGE.fields_by_name['submessage']._options = None - _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE._serialized_start=77 - _TOPLEVELMESSAGE._serialized_end=230 - _NESTEDMESSAGE._serialized_start=232 - _NESTEDMESSAGE._serialized_end=314 - _EXTENDEDMESSAGE._serialized_start=316 - _EXTENDEDMESSAGE._serialized_end=391 - _FOREIGNMESSAGE._serialized_start=393 - _FOREIGNMESSAGE._serialized_end=438 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_messages_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_messages_pb2.py deleted file mode 100644 index d7f7115609..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/more_messages_pb2.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
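The import-time `RegisterExtension` calls above are how the pure-Python runtime learns which extensions a message can carry; the same field descriptors are also resolvable from the default pool. The snippet below is only meaningful where this test module is still importable (it is being removed from the vendored copy here):

    from google.protobuf import descriptor_pool
    from google.protobuf.internal import more_extensions_pb2

    pool = descriptor_pool.Default()
    ext = pool.FindExtensionByName(
        'google.protobuf.internal.optional_int_extension')

    msg = more_extensions_pb2.ExtendedMessage()
    msg.Extensions[ext] = 7   # same FieldDescriptor the module exposes directly
    assert msg.Extensions[more_extensions_pb2.optional_int_extension] == 7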
-# source: google/protobuf/internal/more_messages.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - OutOfOrderFields.RegisterExtension(optional_uint64) - OutOfOrderFields.RegisterExtension(optional_int64) - globals()['class'].RegisterExtension(globals()['continue']) - getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) - globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) - - DESCRIPTOR._options = None - _IS._serialized_start=2669 - _IS._serialized_end=2696 - _OUTOFORDERFIELDS._serialized_start=74 - _OUTOFORDERFIELDS._serialized_end=178 - _CLASS._serialized_start=181 - _CLASS._serialized_end=514 - _CLASS_TRY._serialized_start=448 - _CLASS_TRY._serialized_end=476 - _CLASS_FOR._serialized_start=478 - _CLASS_FOR._serialized_end=506 - _EXTENDCLASS._serialized_start=516 - _EXTENDCLASS._serialized_end=579 - _TESTFULLKEYWORD._serialized_start=581 - _TESTFULLKEYWORD._serialized_end=707 - _LOTSNESTEDMESSAGE._serialized_start=710 - _LOTSNESTEDMESSAGE._serialized_end=2667 - _LOTSNESTEDMESSAGE_B0._serialized_start=731 - _LOTSNESTEDMESSAGE_B0._serialized_end=735 - _LOTSNESTEDMESSAGE_B1._serialized_start=737 - 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 - _LOTSNESTEDMESSAGE_B2._serialized_start=743 - _LOTSNESTEDMESSAGE_B2._serialized_end=747 - _LOTSNESTEDMESSAGE_B3._serialized_start=749 - _LOTSNESTEDMESSAGE_B3._serialized_end=753 - _LOTSNESTEDMESSAGE_B4._serialized_start=755 - _LOTSNESTEDMESSAGE_B4._serialized_end=759 - _LOTSNESTEDMESSAGE_B5._serialized_start=761 - _LOTSNESTEDMESSAGE_B5._serialized_end=765 - _LOTSNESTEDMESSAGE_B6._serialized_start=767 - _LOTSNESTEDMESSAGE_B6._serialized_end=771 - _LOTSNESTEDMESSAGE_B7._serialized_start=773 - _LOTSNESTEDMESSAGE_B7._serialized_end=777 - _LOTSNESTEDMESSAGE_B8._serialized_start=779 - _LOTSNESTEDMESSAGE_B8._serialized_end=783 - _LOTSNESTEDMESSAGE_B9._serialized_start=785 - _LOTSNESTEDMESSAGE_B9._serialized_end=789 - _LOTSNESTEDMESSAGE_B10._serialized_start=791 - _LOTSNESTEDMESSAGE_B10._serialized_end=796 - _LOTSNESTEDMESSAGE_B11._serialized_start=798 - _LOTSNESTEDMESSAGE_B11._serialized_end=803 - _LOTSNESTEDMESSAGE_B12._serialized_start=805 - _LOTSNESTEDMESSAGE_B12._serialized_end=810 - _LOTSNESTEDMESSAGE_B13._serialized_start=812 - _LOTSNESTEDMESSAGE_B13._serialized_end=817 - _LOTSNESTEDMESSAGE_B14._serialized_start=819 - _LOTSNESTEDMESSAGE_B14._serialized_end=824 - _LOTSNESTEDMESSAGE_B15._serialized_start=826 - _LOTSNESTEDMESSAGE_B15._serialized_end=831 - _LOTSNESTEDMESSAGE_B16._serialized_start=833 - _LOTSNESTEDMESSAGE_B16._serialized_end=838 - _LOTSNESTEDMESSAGE_B17._serialized_start=840 - _LOTSNESTEDMESSAGE_B17._serialized_end=845 - _LOTSNESTEDMESSAGE_B18._serialized_start=847 - _LOTSNESTEDMESSAGE_B18._serialized_end=852 - _LOTSNESTEDMESSAGE_B19._serialized_start=854 - _LOTSNESTEDMESSAGE_B19._serialized_end=859 - _LOTSNESTEDMESSAGE_B20._serialized_start=861 - _LOTSNESTEDMESSAGE_B20._serialized_end=866 - _LOTSNESTEDMESSAGE_B21._serialized_start=868 - _LOTSNESTEDMESSAGE_B21._serialized_end=873 - _LOTSNESTEDMESSAGE_B22._serialized_start=875 - _LOTSNESTEDMESSAGE_B22._serialized_end=880 - _LOTSNESTEDMESSAGE_B23._serialized_start=882 - _LOTSNESTEDMESSAGE_B23._serialized_end=887 - _LOTSNESTEDMESSAGE_B24._serialized_start=889 - _LOTSNESTEDMESSAGE_B24._serialized_end=894 - _LOTSNESTEDMESSAGE_B25._serialized_start=896 - _LOTSNESTEDMESSAGE_B25._serialized_end=901 - _LOTSNESTEDMESSAGE_B26._serialized_start=903 - _LOTSNESTEDMESSAGE_B26._serialized_end=908 - _LOTSNESTEDMESSAGE_B27._serialized_start=910 - _LOTSNESTEDMESSAGE_B27._serialized_end=915 - _LOTSNESTEDMESSAGE_B28._serialized_start=917 - _LOTSNESTEDMESSAGE_B28._serialized_end=922 - _LOTSNESTEDMESSAGE_B29._serialized_start=924 - _LOTSNESTEDMESSAGE_B29._serialized_end=929 - _LOTSNESTEDMESSAGE_B30._serialized_start=931 - _LOTSNESTEDMESSAGE_B30._serialized_end=936 - _LOTSNESTEDMESSAGE_B31._serialized_start=938 - _LOTSNESTEDMESSAGE_B31._serialized_end=943 - _LOTSNESTEDMESSAGE_B32._serialized_start=945 - _LOTSNESTEDMESSAGE_B32._serialized_end=950 - _LOTSNESTEDMESSAGE_B33._serialized_start=952 - _LOTSNESTEDMESSAGE_B33._serialized_end=957 - _LOTSNESTEDMESSAGE_B34._serialized_start=959 - _LOTSNESTEDMESSAGE_B34._serialized_end=964 - _LOTSNESTEDMESSAGE_B35._serialized_start=966 - _LOTSNESTEDMESSAGE_B35._serialized_end=971 - _LOTSNESTEDMESSAGE_B36._serialized_start=973 - _LOTSNESTEDMESSAGE_B36._serialized_end=978 - _LOTSNESTEDMESSAGE_B37._serialized_start=980 - _LOTSNESTEDMESSAGE_B37._serialized_end=985 - _LOTSNESTEDMESSAGE_B38._serialized_start=987 - _LOTSNESTEDMESSAGE_B38._serialized_end=992 - _LOTSNESTEDMESSAGE_B39._serialized_start=994 - _LOTSNESTEDMESSAGE_B39._serialized_end=999 - 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 - _LOTSNESTEDMESSAGE_B40._serialized_end=1006 - _LOTSNESTEDMESSAGE_B41._serialized_start=1008 - _LOTSNESTEDMESSAGE_B41._serialized_end=1013 - _LOTSNESTEDMESSAGE_B42._serialized_start=1015 - _LOTSNESTEDMESSAGE_B42._serialized_end=1020 - _LOTSNESTEDMESSAGE_B43._serialized_start=1022 - _LOTSNESTEDMESSAGE_B43._serialized_end=1027 - _LOTSNESTEDMESSAGE_B44._serialized_start=1029 - _LOTSNESTEDMESSAGE_B44._serialized_end=1034 - _LOTSNESTEDMESSAGE_B45._serialized_start=1036 - _LOTSNESTEDMESSAGE_B45._serialized_end=1041 - _LOTSNESTEDMESSAGE_B46._serialized_start=1043 - _LOTSNESTEDMESSAGE_B46._serialized_end=1048 - _LOTSNESTEDMESSAGE_B47._serialized_start=1050 - _LOTSNESTEDMESSAGE_B47._serialized_end=1055 - _LOTSNESTEDMESSAGE_B48._serialized_start=1057 - _LOTSNESTEDMESSAGE_B48._serialized_end=1062 - _LOTSNESTEDMESSAGE_B49._serialized_start=1064 - _LOTSNESTEDMESSAGE_B49._serialized_end=1069 - _LOTSNESTEDMESSAGE_B50._serialized_start=1071 - _LOTSNESTEDMESSAGE_B50._serialized_end=1076 - _LOTSNESTEDMESSAGE_B51._serialized_start=1078 - _LOTSNESTEDMESSAGE_B51._serialized_end=1083 - _LOTSNESTEDMESSAGE_B52._serialized_start=1085 - _LOTSNESTEDMESSAGE_B52._serialized_end=1090 - _LOTSNESTEDMESSAGE_B53._serialized_start=1092 - _LOTSNESTEDMESSAGE_B53._serialized_end=1097 - _LOTSNESTEDMESSAGE_B54._serialized_start=1099 - _LOTSNESTEDMESSAGE_B54._serialized_end=1104 - _LOTSNESTEDMESSAGE_B55._serialized_start=1106 - _LOTSNESTEDMESSAGE_B55._serialized_end=1111 - _LOTSNESTEDMESSAGE_B56._serialized_start=1113 - _LOTSNESTEDMESSAGE_B56._serialized_end=1118 - _LOTSNESTEDMESSAGE_B57._serialized_start=1120 - _LOTSNESTEDMESSAGE_B57._serialized_end=1125 - _LOTSNESTEDMESSAGE_B58._serialized_start=1127 - _LOTSNESTEDMESSAGE_B58._serialized_end=1132 - _LOTSNESTEDMESSAGE_B59._serialized_start=1134 - _LOTSNESTEDMESSAGE_B59._serialized_end=1139 - _LOTSNESTEDMESSAGE_B60._serialized_start=1141 - _LOTSNESTEDMESSAGE_B60._serialized_end=1146 - _LOTSNESTEDMESSAGE_B61._serialized_start=1148 - _LOTSNESTEDMESSAGE_B61._serialized_end=1153 - _LOTSNESTEDMESSAGE_B62._serialized_start=1155 - _LOTSNESTEDMESSAGE_B62._serialized_end=1160 - _LOTSNESTEDMESSAGE_B63._serialized_start=1162 - _LOTSNESTEDMESSAGE_B63._serialized_end=1167 - _LOTSNESTEDMESSAGE_B64._serialized_start=1169 - _LOTSNESTEDMESSAGE_B64._serialized_end=1174 - _LOTSNESTEDMESSAGE_B65._serialized_start=1176 - _LOTSNESTEDMESSAGE_B65._serialized_end=1181 - _LOTSNESTEDMESSAGE_B66._serialized_start=1183 - _LOTSNESTEDMESSAGE_B66._serialized_end=1188 - _LOTSNESTEDMESSAGE_B67._serialized_start=1190 - _LOTSNESTEDMESSAGE_B67._serialized_end=1195 - _LOTSNESTEDMESSAGE_B68._serialized_start=1197 - _LOTSNESTEDMESSAGE_B68._serialized_end=1202 - _LOTSNESTEDMESSAGE_B69._serialized_start=1204 - _LOTSNESTEDMESSAGE_B69._serialized_end=1209 - _LOTSNESTEDMESSAGE_B70._serialized_start=1211 - _LOTSNESTEDMESSAGE_B70._serialized_end=1216 - _LOTSNESTEDMESSAGE_B71._serialized_start=1218 - _LOTSNESTEDMESSAGE_B71._serialized_end=1223 - _LOTSNESTEDMESSAGE_B72._serialized_start=1225 - _LOTSNESTEDMESSAGE_B72._serialized_end=1230 - _LOTSNESTEDMESSAGE_B73._serialized_start=1232 - _LOTSNESTEDMESSAGE_B73._serialized_end=1237 - _LOTSNESTEDMESSAGE_B74._serialized_start=1239 - _LOTSNESTEDMESSAGE_B74._serialized_end=1244 - _LOTSNESTEDMESSAGE_B75._serialized_start=1246 - _LOTSNESTEDMESSAGE_B75._serialized_end=1251 - _LOTSNESTEDMESSAGE_B76._serialized_start=1253 - _LOTSNESTEDMESSAGE_B76._serialized_end=1258 - _LOTSNESTEDMESSAGE_B77._serialized_start=1260 - 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 - _LOTSNESTEDMESSAGE_B78._serialized_start=1267 - _LOTSNESTEDMESSAGE_B78._serialized_end=1272 - _LOTSNESTEDMESSAGE_B79._serialized_start=1274 - _LOTSNESTEDMESSAGE_B79._serialized_end=1279 - _LOTSNESTEDMESSAGE_B80._serialized_start=1281 - _LOTSNESTEDMESSAGE_B80._serialized_end=1286 - _LOTSNESTEDMESSAGE_B81._serialized_start=1288 - _LOTSNESTEDMESSAGE_B81._serialized_end=1293 - _LOTSNESTEDMESSAGE_B82._serialized_start=1295 - _LOTSNESTEDMESSAGE_B82._serialized_end=1300 - _LOTSNESTEDMESSAGE_B83._serialized_start=1302 - _LOTSNESTEDMESSAGE_B83._serialized_end=1307 - _LOTSNESTEDMESSAGE_B84._serialized_start=1309 - _LOTSNESTEDMESSAGE_B84._serialized_end=1314 - _LOTSNESTEDMESSAGE_B85._serialized_start=1316 - _LOTSNESTEDMESSAGE_B85._serialized_end=1321 - _LOTSNESTEDMESSAGE_B86._serialized_start=1323 - _LOTSNESTEDMESSAGE_B86._serialized_end=1328 - _LOTSNESTEDMESSAGE_B87._serialized_start=1330 - _LOTSNESTEDMESSAGE_B87._serialized_end=1335 - _LOTSNESTEDMESSAGE_B88._serialized_start=1337 - _LOTSNESTEDMESSAGE_B88._serialized_end=1342 - _LOTSNESTEDMESSAGE_B89._serialized_start=1344 - _LOTSNESTEDMESSAGE_B89._serialized_end=1349 - _LOTSNESTEDMESSAGE_B90._serialized_start=1351 - _LOTSNESTEDMESSAGE_B90._serialized_end=1356 - _LOTSNESTEDMESSAGE_B91._serialized_start=1358 - _LOTSNESTEDMESSAGE_B91._serialized_end=1363 - _LOTSNESTEDMESSAGE_B92._serialized_start=1365 - _LOTSNESTEDMESSAGE_B92._serialized_end=1370 - _LOTSNESTEDMESSAGE_B93._serialized_start=1372 - _LOTSNESTEDMESSAGE_B93._serialized_end=1377 - _LOTSNESTEDMESSAGE_B94._serialized_start=1379 - _LOTSNESTEDMESSAGE_B94._serialized_end=1384 - _LOTSNESTEDMESSAGE_B95._serialized_start=1386 - _LOTSNESTEDMESSAGE_B95._serialized_end=1391 - _LOTSNESTEDMESSAGE_B96._serialized_start=1393 - _LOTSNESTEDMESSAGE_B96._serialized_end=1398 - _LOTSNESTEDMESSAGE_B97._serialized_start=1400 - _LOTSNESTEDMESSAGE_B97._serialized_end=1405 - _LOTSNESTEDMESSAGE_B98._serialized_start=1407 - _LOTSNESTEDMESSAGE_B98._serialized_end=1412 - _LOTSNESTEDMESSAGE_B99._serialized_start=1414 - _LOTSNESTEDMESSAGE_B99._serialized_end=1419 - _LOTSNESTEDMESSAGE_B100._serialized_start=1421 - _LOTSNESTEDMESSAGE_B100._serialized_end=1427 - _LOTSNESTEDMESSAGE_B101._serialized_start=1429 - _LOTSNESTEDMESSAGE_B101._serialized_end=1435 - _LOTSNESTEDMESSAGE_B102._serialized_start=1437 - _LOTSNESTEDMESSAGE_B102._serialized_end=1443 - _LOTSNESTEDMESSAGE_B103._serialized_start=1445 - _LOTSNESTEDMESSAGE_B103._serialized_end=1451 - _LOTSNESTEDMESSAGE_B104._serialized_start=1453 - _LOTSNESTEDMESSAGE_B104._serialized_end=1459 - _LOTSNESTEDMESSAGE_B105._serialized_start=1461 - _LOTSNESTEDMESSAGE_B105._serialized_end=1467 - _LOTSNESTEDMESSAGE_B106._serialized_start=1469 - _LOTSNESTEDMESSAGE_B106._serialized_end=1475 - _LOTSNESTEDMESSAGE_B107._serialized_start=1477 - _LOTSNESTEDMESSAGE_B107._serialized_end=1483 - _LOTSNESTEDMESSAGE_B108._serialized_start=1485 - _LOTSNESTEDMESSAGE_B108._serialized_end=1491 - _LOTSNESTEDMESSAGE_B109._serialized_start=1493 - _LOTSNESTEDMESSAGE_B109._serialized_end=1499 - _LOTSNESTEDMESSAGE_B110._serialized_start=1501 - _LOTSNESTEDMESSAGE_B110._serialized_end=1507 - _LOTSNESTEDMESSAGE_B111._serialized_start=1509 - _LOTSNESTEDMESSAGE_B111._serialized_end=1515 - _LOTSNESTEDMESSAGE_B112._serialized_start=1517 - _LOTSNESTEDMESSAGE_B112._serialized_end=1523 - _LOTSNESTEDMESSAGE_B113._serialized_start=1525 - _LOTSNESTEDMESSAGE_B113._serialized_end=1531 - _LOTSNESTEDMESSAGE_B114._serialized_start=1533 - _LOTSNESTEDMESSAGE_B114._serialized_end=1539 - 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 - _LOTSNESTEDMESSAGE_B115._serialized_end=1547 - _LOTSNESTEDMESSAGE_B116._serialized_start=1549 - _LOTSNESTEDMESSAGE_B116._serialized_end=1555 - _LOTSNESTEDMESSAGE_B117._serialized_start=1557 - _LOTSNESTEDMESSAGE_B117._serialized_end=1563 - _LOTSNESTEDMESSAGE_B118._serialized_start=1565 - _LOTSNESTEDMESSAGE_B118._serialized_end=1571 - _LOTSNESTEDMESSAGE_B119._serialized_start=1573 - _LOTSNESTEDMESSAGE_B119._serialized_end=1579 - _LOTSNESTEDMESSAGE_B120._serialized_start=1581 - _LOTSNESTEDMESSAGE_B120._serialized_end=1587 - _LOTSNESTEDMESSAGE_B121._serialized_start=1589 - _LOTSNESTEDMESSAGE_B121._serialized_end=1595 - _LOTSNESTEDMESSAGE_B122._serialized_start=1597 - _LOTSNESTEDMESSAGE_B122._serialized_end=1603 - _LOTSNESTEDMESSAGE_B123._serialized_start=1605 - _LOTSNESTEDMESSAGE_B123._serialized_end=1611 - _LOTSNESTEDMESSAGE_B124._serialized_start=1613 - _LOTSNESTEDMESSAGE_B124._serialized_end=1619 - _LOTSNESTEDMESSAGE_B125._serialized_start=1621 - _LOTSNESTEDMESSAGE_B125._serialized_end=1627 - _LOTSNESTEDMESSAGE_B126._serialized_start=1629 - _LOTSNESTEDMESSAGE_B126._serialized_end=1635 - _LOTSNESTEDMESSAGE_B127._serialized_start=1637 - _LOTSNESTEDMESSAGE_B127._serialized_end=1643 - _LOTSNESTEDMESSAGE_B128._serialized_start=1645 - _LOTSNESTEDMESSAGE_B128._serialized_end=1651 - _LOTSNESTEDMESSAGE_B129._serialized_start=1653 - _LOTSNESTEDMESSAGE_B129._serialized_end=1659 - _LOTSNESTEDMESSAGE_B130._serialized_start=1661 - _LOTSNESTEDMESSAGE_B130._serialized_end=1667 - _LOTSNESTEDMESSAGE_B131._serialized_start=1669 - _LOTSNESTEDMESSAGE_B131._serialized_end=1675 - _LOTSNESTEDMESSAGE_B132._serialized_start=1677 - _LOTSNESTEDMESSAGE_B132._serialized_end=1683 - _LOTSNESTEDMESSAGE_B133._serialized_start=1685 - _LOTSNESTEDMESSAGE_B133._serialized_end=1691 - _LOTSNESTEDMESSAGE_B134._serialized_start=1693 - _LOTSNESTEDMESSAGE_B134._serialized_end=1699 - _LOTSNESTEDMESSAGE_B135._serialized_start=1701 - _LOTSNESTEDMESSAGE_B135._serialized_end=1707 - _LOTSNESTEDMESSAGE_B136._serialized_start=1709 - _LOTSNESTEDMESSAGE_B136._serialized_end=1715 - _LOTSNESTEDMESSAGE_B137._serialized_start=1717 - _LOTSNESTEDMESSAGE_B137._serialized_end=1723 - _LOTSNESTEDMESSAGE_B138._serialized_start=1725 - _LOTSNESTEDMESSAGE_B138._serialized_end=1731 - _LOTSNESTEDMESSAGE_B139._serialized_start=1733 - _LOTSNESTEDMESSAGE_B139._serialized_end=1739 - _LOTSNESTEDMESSAGE_B140._serialized_start=1741 - _LOTSNESTEDMESSAGE_B140._serialized_end=1747 - _LOTSNESTEDMESSAGE_B141._serialized_start=1749 - _LOTSNESTEDMESSAGE_B141._serialized_end=1755 - _LOTSNESTEDMESSAGE_B142._serialized_start=1757 - _LOTSNESTEDMESSAGE_B142._serialized_end=1763 - _LOTSNESTEDMESSAGE_B143._serialized_start=1765 - _LOTSNESTEDMESSAGE_B143._serialized_end=1771 - _LOTSNESTEDMESSAGE_B144._serialized_start=1773 - _LOTSNESTEDMESSAGE_B144._serialized_end=1779 - _LOTSNESTEDMESSAGE_B145._serialized_start=1781 - _LOTSNESTEDMESSAGE_B145._serialized_end=1787 - _LOTSNESTEDMESSAGE_B146._serialized_start=1789 - _LOTSNESTEDMESSAGE_B146._serialized_end=1795 - _LOTSNESTEDMESSAGE_B147._serialized_start=1797 - _LOTSNESTEDMESSAGE_B147._serialized_end=1803 - _LOTSNESTEDMESSAGE_B148._serialized_start=1805 - _LOTSNESTEDMESSAGE_B148._serialized_end=1811 - _LOTSNESTEDMESSAGE_B149._serialized_start=1813 - _LOTSNESTEDMESSAGE_B149._serialized_end=1819 - _LOTSNESTEDMESSAGE_B150._serialized_start=1821 - _LOTSNESTEDMESSAGE_B150._serialized_end=1827 - _LOTSNESTEDMESSAGE_B151._serialized_start=1829 - _LOTSNESTEDMESSAGE_B151._serialized_end=1835 - 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 - _LOTSNESTEDMESSAGE_B152._serialized_end=1843 - _LOTSNESTEDMESSAGE_B153._serialized_start=1845 - _LOTSNESTEDMESSAGE_B153._serialized_end=1851 - _LOTSNESTEDMESSAGE_B154._serialized_start=1853 - _LOTSNESTEDMESSAGE_B154._serialized_end=1859 - _LOTSNESTEDMESSAGE_B155._serialized_start=1861 - _LOTSNESTEDMESSAGE_B155._serialized_end=1867 - _LOTSNESTEDMESSAGE_B156._serialized_start=1869 - _LOTSNESTEDMESSAGE_B156._serialized_end=1875 - _LOTSNESTEDMESSAGE_B157._serialized_start=1877 - _LOTSNESTEDMESSAGE_B157._serialized_end=1883 - _LOTSNESTEDMESSAGE_B158._serialized_start=1885 - _LOTSNESTEDMESSAGE_B158._serialized_end=1891 - _LOTSNESTEDMESSAGE_B159._serialized_start=1893 - _LOTSNESTEDMESSAGE_B159._serialized_end=1899 - _LOTSNESTEDMESSAGE_B160._serialized_start=1901 - _LOTSNESTEDMESSAGE_B160._serialized_end=1907 - _LOTSNESTEDMESSAGE_B161._serialized_start=1909 - _LOTSNESTEDMESSAGE_B161._serialized_end=1915 - _LOTSNESTEDMESSAGE_B162._serialized_start=1917 - _LOTSNESTEDMESSAGE_B162._serialized_end=1923 - _LOTSNESTEDMESSAGE_B163._serialized_start=1925 - _LOTSNESTEDMESSAGE_B163._serialized_end=1931 - _LOTSNESTEDMESSAGE_B164._serialized_start=1933 - _LOTSNESTEDMESSAGE_B164._serialized_end=1939 - _LOTSNESTEDMESSAGE_B165._serialized_start=1941 - _LOTSNESTEDMESSAGE_B165._serialized_end=1947 - _LOTSNESTEDMESSAGE_B166._serialized_start=1949 - _LOTSNESTEDMESSAGE_B166._serialized_end=1955 - _LOTSNESTEDMESSAGE_B167._serialized_start=1957 - _LOTSNESTEDMESSAGE_B167._serialized_end=1963 - _LOTSNESTEDMESSAGE_B168._serialized_start=1965 - _LOTSNESTEDMESSAGE_B168._serialized_end=1971 - _LOTSNESTEDMESSAGE_B169._serialized_start=1973 - _LOTSNESTEDMESSAGE_B169._serialized_end=1979 - _LOTSNESTEDMESSAGE_B170._serialized_start=1981 - _LOTSNESTEDMESSAGE_B170._serialized_end=1987 - _LOTSNESTEDMESSAGE_B171._serialized_start=1989 - _LOTSNESTEDMESSAGE_B171._serialized_end=1995 - _LOTSNESTEDMESSAGE_B172._serialized_start=1997 - _LOTSNESTEDMESSAGE_B172._serialized_end=2003 - _LOTSNESTEDMESSAGE_B173._serialized_start=2005 - _LOTSNESTEDMESSAGE_B173._serialized_end=2011 - _LOTSNESTEDMESSAGE_B174._serialized_start=2013 - _LOTSNESTEDMESSAGE_B174._serialized_end=2019 - _LOTSNESTEDMESSAGE_B175._serialized_start=2021 - _LOTSNESTEDMESSAGE_B175._serialized_end=2027 - _LOTSNESTEDMESSAGE_B176._serialized_start=2029 - _LOTSNESTEDMESSAGE_B176._serialized_end=2035 - _LOTSNESTEDMESSAGE_B177._serialized_start=2037 - _LOTSNESTEDMESSAGE_B177._serialized_end=2043 - _LOTSNESTEDMESSAGE_B178._serialized_start=2045 - _LOTSNESTEDMESSAGE_B178._serialized_end=2051 - _LOTSNESTEDMESSAGE_B179._serialized_start=2053 - _LOTSNESTEDMESSAGE_B179._serialized_end=2059 - _LOTSNESTEDMESSAGE_B180._serialized_start=2061 - _LOTSNESTEDMESSAGE_B180._serialized_end=2067 - _LOTSNESTEDMESSAGE_B181._serialized_start=2069 - _LOTSNESTEDMESSAGE_B181._serialized_end=2075 - _LOTSNESTEDMESSAGE_B182._serialized_start=2077 - _LOTSNESTEDMESSAGE_B182._serialized_end=2083 - _LOTSNESTEDMESSAGE_B183._serialized_start=2085 - _LOTSNESTEDMESSAGE_B183._serialized_end=2091 - _LOTSNESTEDMESSAGE_B184._serialized_start=2093 - _LOTSNESTEDMESSAGE_B184._serialized_end=2099 - _LOTSNESTEDMESSAGE_B185._serialized_start=2101 - _LOTSNESTEDMESSAGE_B185._serialized_end=2107 - _LOTSNESTEDMESSAGE_B186._serialized_start=2109 - _LOTSNESTEDMESSAGE_B186._serialized_end=2115 - _LOTSNESTEDMESSAGE_B187._serialized_start=2117 - _LOTSNESTEDMESSAGE_B187._serialized_end=2123 - _LOTSNESTEDMESSAGE_B188._serialized_start=2125 - _LOTSNESTEDMESSAGE_B188._serialized_end=2131 - 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 - _LOTSNESTEDMESSAGE_B189._serialized_end=2139 - _LOTSNESTEDMESSAGE_B190._serialized_start=2141 - _LOTSNESTEDMESSAGE_B190._serialized_end=2147 - _LOTSNESTEDMESSAGE_B191._serialized_start=2149 - _LOTSNESTEDMESSAGE_B191._serialized_end=2155 - _LOTSNESTEDMESSAGE_B192._serialized_start=2157 - _LOTSNESTEDMESSAGE_B192._serialized_end=2163 - _LOTSNESTEDMESSAGE_B193._serialized_start=2165 - _LOTSNESTEDMESSAGE_B193._serialized_end=2171 - _LOTSNESTEDMESSAGE_B194._serialized_start=2173 - _LOTSNESTEDMESSAGE_B194._serialized_end=2179 - _LOTSNESTEDMESSAGE_B195._serialized_start=2181 - _LOTSNESTEDMESSAGE_B195._serialized_end=2187 - _LOTSNESTEDMESSAGE_B196._serialized_start=2189 - _LOTSNESTEDMESSAGE_B196._serialized_end=2195 - _LOTSNESTEDMESSAGE_B197._serialized_start=2197 - _LOTSNESTEDMESSAGE_B197._serialized_end=2203 - _LOTSNESTEDMESSAGE_B198._serialized_start=2205 - _LOTSNESTEDMESSAGE_B198._serialized_end=2211 - _LOTSNESTEDMESSAGE_B199._serialized_start=2213 - _LOTSNESTEDMESSAGE_B199._serialized_end=2219 - _LOTSNESTEDMESSAGE_B200._serialized_start=2221 - _LOTSNESTEDMESSAGE_B200._serialized_end=2227 - _LOTSNESTEDMESSAGE_B201._serialized_start=2229 - _LOTSNESTEDMESSAGE_B201._serialized_end=2235 - _LOTSNESTEDMESSAGE_B202._serialized_start=2237 - _LOTSNESTEDMESSAGE_B202._serialized_end=2243 - _LOTSNESTEDMESSAGE_B203._serialized_start=2245 - _LOTSNESTEDMESSAGE_B203._serialized_end=2251 - _LOTSNESTEDMESSAGE_B204._serialized_start=2253 - _LOTSNESTEDMESSAGE_B204._serialized_end=2259 - _LOTSNESTEDMESSAGE_B205._serialized_start=2261 - _LOTSNESTEDMESSAGE_B205._serialized_end=2267 - _LOTSNESTEDMESSAGE_B206._serialized_start=2269 - _LOTSNESTEDMESSAGE_B206._serialized_end=2275 - _LOTSNESTEDMESSAGE_B207._serialized_start=2277 - _LOTSNESTEDMESSAGE_B207._serialized_end=2283 - _LOTSNESTEDMESSAGE_B208._serialized_start=2285 - _LOTSNESTEDMESSAGE_B208._serialized_end=2291 - _LOTSNESTEDMESSAGE_B209._serialized_start=2293 - _LOTSNESTEDMESSAGE_B209._serialized_end=2299 - _LOTSNESTEDMESSAGE_B210._serialized_start=2301 - _LOTSNESTEDMESSAGE_B210._serialized_end=2307 - _LOTSNESTEDMESSAGE_B211._serialized_start=2309 - _LOTSNESTEDMESSAGE_B211._serialized_end=2315 - _LOTSNESTEDMESSAGE_B212._serialized_start=2317 - _LOTSNESTEDMESSAGE_B212._serialized_end=2323 - _LOTSNESTEDMESSAGE_B213._serialized_start=2325 - _LOTSNESTEDMESSAGE_B213._serialized_end=2331 - _LOTSNESTEDMESSAGE_B214._serialized_start=2333 - _LOTSNESTEDMESSAGE_B214._serialized_end=2339 - _LOTSNESTEDMESSAGE_B215._serialized_start=2341 - _LOTSNESTEDMESSAGE_B215._serialized_end=2347 - _LOTSNESTEDMESSAGE_B216._serialized_start=2349 - _LOTSNESTEDMESSAGE_B216._serialized_end=2355 - _LOTSNESTEDMESSAGE_B217._serialized_start=2357 - _LOTSNESTEDMESSAGE_B217._serialized_end=2363 - _LOTSNESTEDMESSAGE_B218._serialized_start=2365 - _LOTSNESTEDMESSAGE_B218._serialized_end=2371 - _LOTSNESTEDMESSAGE_B219._serialized_start=2373 - _LOTSNESTEDMESSAGE_B219._serialized_end=2379 - _LOTSNESTEDMESSAGE_B220._serialized_start=2381 - _LOTSNESTEDMESSAGE_B220._serialized_end=2387 - _LOTSNESTEDMESSAGE_B221._serialized_start=2389 - _LOTSNESTEDMESSAGE_B221._serialized_end=2395 - _LOTSNESTEDMESSAGE_B222._serialized_start=2397 - _LOTSNESTEDMESSAGE_B222._serialized_end=2403 - _LOTSNESTEDMESSAGE_B223._serialized_start=2405 - _LOTSNESTEDMESSAGE_B223._serialized_end=2411 - _LOTSNESTEDMESSAGE_B224._serialized_start=2413 - _LOTSNESTEDMESSAGE_B224._serialized_end=2419 - _LOTSNESTEDMESSAGE_B225._serialized_start=2421 - _LOTSNESTEDMESSAGE_B225._serialized_end=2427 - 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 - _LOTSNESTEDMESSAGE_B226._serialized_end=2435 - _LOTSNESTEDMESSAGE_B227._serialized_start=2437 - _LOTSNESTEDMESSAGE_B227._serialized_end=2443 - _LOTSNESTEDMESSAGE_B228._serialized_start=2445 - _LOTSNESTEDMESSAGE_B228._serialized_end=2451 - _LOTSNESTEDMESSAGE_B229._serialized_start=2453 - _LOTSNESTEDMESSAGE_B229._serialized_end=2459 - _LOTSNESTEDMESSAGE_B230._serialized_start=2461 - _LOTSNESTEDMESSAGE_B230._serialized_end=2467 - _LOTSNESTEDMESSAGE_B231._serialized_start=2469 - _LOTSNESTEDMESSAGE_B231._serialized_end=2475 - _LOTSNESTEDMESSAGE_B232._serialized_start=2477 - _LOTSNESTEDMESSAGE_B232._serialized_end=2483 - _LOTSNESTEDMESSAGE_B233._serialized_start=2485 - _LOTSNESTEDMESSAGE_B233._serialized_end=2491 - _LOTSNESTEDMESSAGE_B234._serialized_start=2493 - _LOTSNESTEDMESSAGE_B234._serialized_end=2499 - _LOTSNESTEDMESSAGE_B235._serialized_start=2501 - _LOTSNESTEDMESSAGE_B235._serialized_end=2507 - _LOTSNESTEDMESSAGE_B236._serialized_start=2509 - _LOTSNESTEDMESSAGE_B236._serialized_end=2515 - _LOTSNESTEDMESSAGE_B237._serialized_start=2517 - _LOTSNESTEDMESSAGE_B237._serialized_end=2523 - _LOTSNESTEDMESSAGE_B238._serialized_start=2525 - _LOTSNESTEDMESSAGE_B238._serialized_end=2531 - _LOTSNESTEDMESSAGE_B239._serialized_start=2533 - _LOTSNESTEDMESSAGE_B239._serialized_end=2539 - _LOTSNESTEDMESSAGE_B240._serialized_start=2541 - _LOTSNESTEDMESSAGE_B240._serialized_end=2547 - _LOTSNESTEDMESSAGE_B241._serialized_start=2549 - _LOTSNESTEDMESSAGE_B241._serialized_end=2555 - _LOTSNESTEDMESSAGE_B242._serialized_start=2557 - _LOTSNESTEDMESSAGE_B242._serialized_end=2563 - _LOTSNESTEDMESSAGE_B243._serialized_start=2565 - _LOTSNESTEDMESSAGE_B243._serialized_end=2571 - _LOTSNESTEDMESSAGE_B244._serialized_start=2573 - _LOTSNESTEDMESSAGE_B244._serialized_end=2579 - _LOTSNESTEDMESSAGE_B245._serialized_start=2581 - _LOTSNESTEDMESSAGE_B245._serialized_end=2587 - _LOTSNESTEDMESSAGE_B246._serialized_start=2589 - _LOTSNESTEDMESSAGE_B246._serialized_end=2595 - _LOTSNESTEDMESSAGE_B247._serialized_start=2597 - _LOTSNESTEDMESSAGE_B247._serialized_end=2603 - _LOTSNESTEDMESSAGE_B248._serialized_start=2605 - _LOTSNESTEDMESSAGE_B248._serialized_end=2611 - _LOTSNESTEDMESSAGE_B249._serialized_start=2613 - _LOTSNESTEDMESSAGE_B249._serialized_end=2619 - _LOTSNESTEDMESSAGE_B250._serialized_start=2621 - _LOTSNESTEDMESSAGE_B250._serialized_end=2627 - _LOTSNESTEDMESSAGE_B251._serialized_start=2629 - _LOTSNESTEDMESSAGE_B251._serialized_end=2635 - _LOTSNESTEDMESSAGE_B252._serialized_start=2637 - _LOTSNESTEDMESSAGE_B252._serialized_end=2643 - _LOTSNESTEDMESSAGE_B253._serialized_start=2645 - _LOTSNESTEDMESSAGE_B253._serialized_end=2651 - _LOTSNESTEDMESSAGE_B254._serialized_start=2653 - _LOTSNESTEDMESSAGE_B254._serialized_end=2659 - _LOTSNESTEDMESSAGE_B255._serialized_start=2661 - _LOTSNESTEDMESSAGE_B255._serialized_end=2667 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/no_package_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/no_package_pb2.py deleted file mode 100644 index d46dee080a..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/no_package_pb2.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/no_package.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _NOPACKAGEENUM._serialized_start=106 - _NOPACKAGEENUM._serialized_end=169 - _NOPACKAGEMESSAGE._serialized_start=45 - _NOPACKAGEMESSAGE._serialized_end=104 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/python_message.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/python_message.py deleted file mode 100644 index 2921d5cb6e..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/python_message.py +++ /dev/null @@ -1,1539 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. -# -# TODO(robinson): Helpers for verbose, common checks like seeing if a -# descriptor's cpp_type is CPPTYPE_MESSAGE. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. 
-(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -from io import BytesIO -import struct -import sys -import weakref - -# We use "as" to avoid name collisions with variables. -from google.protobuf.internal import api_implementation -from google.protobuf.internal import containers -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import enum_type_wrapper -from google.protobuf.internal import extension_dict -from google.protobuf.internal import message_listener as message_listener_mod -from google.protobuf.internal import type_checkers -from google.protobuf.internal import well_known_types -from google.protobuf.internal import wire_format -from google.protobuf import descriptor as descriptor_mod -from google.protobuf import message as message_mod -from google.protobuf import text_format - -_FieldDescriptor = descriptor_mod.FieldDescriptor -_AnyFullTypeName = 'google.protobuf.Any' -_ExtensionDict = extension_dict._ExtensionDict - -class GeneratedProtocolMessageType(type): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - We add implementations for all methods described in the Message class. We - also create properties to allow getting/setting all fields in the protocol - message. Finally, we create slots to prevent users from accidentally - "setting" nonexistent fields in the protocol message, which then wouldn't get - serialized / deserialized properly. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __new__(cls, name, bases, dictionary): - """Custom allocation for runtime-generated class types. - - We override __new__ because this is apparently the only place - where we can meaningfully set __slots__ on the class we're creating(?). - (The interplay between metaclasses and slots is not very well-documented). - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - - Returns: - Newly-allocated class. - - Raises: - RuntimeError: Generated code only work with python cpp extension. 
- """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - if isinstance(descriptor, str): - raise RuntimeError('The generated code only work with python cpp ' - 'extension, but it is using pure python runtime.') - - # If a concrete class already exists for this descriptor, don't try to - # create another. Doing so will break any messages that already exist with - # the existing class. - # - # The C++ implementation appears to have its own internal `PyMessageFactory` - # to achieve similar results. - # - # This most commonly happens in `text_format.py` when using descriptors from - # a custom pool; it calls symbol_database.Global().getPrototype() on a - # descriptor which already has an existing concrete class. - new_class = getattr(descriptor, '_concrete_class', None) - if new_class: - return new_class - - if descriptor.full_name in well_known_types.WKTBASES: - bases += (well_known_types.WKTBASES[descriptor.full_name],) - _AddClassAttributesForNestedExtensions(descriptor, dictionary) - _AddSlots(descriptor, dictionary) - - superclass = super(GeneratedProtocolMessageType, cls) - new_class = superclass.__new__(cls, name, bases, dictionary) - return new_class - - def __init__(cls, name, bases, dictionary): - """Here we perform the majority of our work on the class. - We add enum getters, an __init__ method, implementations - of all Message methods, and properties for all fields - in the protocol type. - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - # If this is an _existing_ class looked up via `_concrete_class` in the - # __new__ method above, then we don't need to re-initialize anything. - existing_class = getattr(descriptor, '_concrete_class', None) - if existing_class: - assert existing_class is cls, ( - 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' - % (descriptor.full_name)) - return - - cls._decoders_by_tag = {} - if (descriptor.has_options and - descriptor.GetOptions().message_set_wire_format): - cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( - decoder.MessageSetItemDecoder(descriptor), None) - - # Attach stuff to each FieldDescriptor for quick lookup later on. - for field in descriptor.fields: - _AttachFieldHelpers(cls, field) - - descriptor._concrete_class = cls # pylint: disable=protected-access - _AddEnumValues(descriptor, cls) - _AddInitMethod(descriptor, cls) - _AddPropertiesForFields(descriptor, cls) - _AddPropertiesForExtensions(descriptor, cls) - _AddStaticMethods(cls) - _AddMessageMethods(descriptor, cls) - _AddPrivateHelperMethods(descriptor, cls) - - superclass = super(GeneratedProtocolMessageType, cls) - superclass.__init__(name, bases, dictionary) - - -# Stateless helpers for GeneratedProtocolMessageType below. -# Outside clients should not access these directly. -# -# I opted not to make any of these methods on the metaclass, to make it more -# clear that I'm not really using any state there and to keep clients from -# thinking that they have direct access to these construction helpers. 
- - -def _PropertyName(proto_field_name): - """Returns the name of the public property attribute which - clients can use to get and (in some cases) set the value - of a protocol message field. - - Args: - proto_field_name: The protocol message field name, exactly - as it appears (or would appear) in a .proto file. - """ - # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. - # nnorwitz makes my day by writing: - # """ - # FYI. See the keyword module in the stdlib. This could be as simple as: - # - # if keyword.iskeyword(proto_field_name): - # return proto_field_name + "_" - # return proto_field_name - # """ - # Kenton says: The above is a BAD IDEA. People rely on being able to use - # getattr() and setattr() to reflectively manipulate field values. If we - # rename the properties, then every such user has to also make sure to apply - # the same transformation. Note that currently if you name a field "yield", - # you can still access it just fine using getattr/setattr -- it's not even - # that cumbersome to do so. - # TODO(kenton): Remove this method entirely if/when everyone agrees with my - # position. - return proto_field_name - - -def _AddSlots(message_descriptor, dictionary): - """Adds a __slots__ entry to dictionary, containing the names of all valid - attributes for this message type. - - Args: - message_descriptor: A Descriptor instance describing this message type. - dictionary: Class dictionary to which we'll add a '__slots__' entry. - """ - dictionary['__slots__'] = ['_cached_byte_size', - '_cached_byte_size_dirty', - '_fields', - '_unknown_fields', - '_unknown_field_set', - '_is_present_in_parent', - '_listener', - '_listener_for_children', - '__weakref__', - '_oneofs'] - - -def _IsMessageSetExtension(field): - return (field.is_extension and - field.containing_type.has_options and - field.containing_type.GetOptions().message_set_wire_format and - field.type == _FieldDescriptor.TYPE_MESSAGE and - field.label == _FieldDescriptor.LABEL_OPTIONAL) - - -def _IsMapField(field): - return (field.type == _FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def _IsMessageMapField(field): - value_type = field.message_type.fields_by_name['value'] - return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE - - -def _AttachFieldHelpers(cls, field_descriptor): - is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) - is_packable = (is_repeated and - wire_format.IsTypePackable(field_descriptor.type)) - is_proto3 = field_descriptor.containing_type.syntax == 'proto3' - if not is_packable: - is_packed = False - elif field_descriptor.containing_type.syntax == 'proto2': - is_packed = (field_descriptor.has_options and - field_descriptor.GetOptions().packed) - else: - has_packed_false = (field_descriptor.has_options and - field_descriptor.GetOptions().HasField('packed') and - field_descriptor.GetOptions().packed == False) - is_packed = not has_packed_false - is_map_entry = _IsMapField(field_descriptor) - - if is_map_entry: - field_encoder = encoder.MapEncoder(field_descriptor) - sizer = encoder.MapSizer(field_descriptor, - _IsMessageMapField(field_descriptor)) - elif _IsMessageSetExtension(field_descriptor): - field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) - sizer = encoder.MessageSetItemSizer(field_descriptor.number) - else: - field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - sizer = 
type_checkers.TYPE_TO_SIZER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - - field_descriptor._encoder = field_encoder - field_descriptor._sizer = sizer - field_descriptor._default_constructor = _DefaultValueConstructorForField( - field_descriptor) - - def AddDecoder(wiretype, is_packed): - tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) - decode_type = field_descriptor.type - if (decode_type == _FieldDescriptor.TYPE_ENUM and - type_checkers.SupportsOpenEnums(field_descriptor)): - decode_type = _FieldDescriptor.TYPE_INT32 - - oneof_descriptor = None - clear_if_default = False - if field_descriptor.containing_oneof is not None: - oneof_descriptor = field_descriptor - elif (is_proto3 and not is_repeated and - field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): - clear_if_default = True - - if is_map_entry: - is_message_map = _IsMessageMapField(field_descriptor) - - field_decoder = decoder.MapDecoder( - field_descriptor, _GetInitializeDefaultForMap(field_descriptor), - is_message_map) - elif decode_type == _FieldDescriptor.TYPE_STRING: - field_decoder = decoder.StringDecoder( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor) - else: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - # pylint: disable=protected-access - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - - cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) - - AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], - False) - - if is_repeated and wire_format.IsTypePackable(field_descriptor.type): - # To support wire compatibility of adding packed = true, add a decoder for - # packed values regardless of the field's options. - AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) - - -def _AddClassAttributesForNestedExtensions(descriptor, dictionary): - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - assert extension_name not in dictionary - dictionary[extension_name] = extension_field - - -def _AddEnumValues(descriptor, cls): - """Sets class-level attributes for all enum fields defined in this message. - - Also exporting a class-level object that can name enum values. - - Args: - descriptor: Descriptor object for this message type. - cls: Class we're constructing for this message type. 
- """ - for enum_type in descriptor.enum_types: - setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) - for enum_value in enum_type.values: - setattr(cls, enum_value.name, enum_value.number) - - -def _GetInitializeDefaultForMap(field): - if field.label != _FieldDescriptor.LABEL_REPEATED: - raise ValueError('map_entry set on non-repeated field %s' % ( - field.name)) - fields_by_name = field.message_type.fields_by_name - key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) - - value_field = fields_by_name['value'] - if _IsMessageMapField(field): - def MakeMessageMapDefault(message): - return containers.MessageMap( - message._listener_for_children, value_field.message_type, key_checker, - field.message_type) - return MakeMessageMapDefault - else: - value_checker = type_checkers.GetTypeChecker(value_field) - def MakePrimitiveMapDefault(message): - return containers.ScalarMap( - message._listener_for_children, key_checker, value_checker, - field.message_type) - return MakePrimitiveMapDefault - -def _DefaultValueConstructorForField(field): - """Returns a function which returns a default value for a field. - - Args: - field: FieldDescriptor object for this field. - - The returned function has one argument: - message: Message instance containing this field, or a weakref proxy - of same. - - That function in turn returns a default value for this field. The default - value may refer back to |message| via a weak reference. - """ - - if _IsMapField(field): - return _GetInitializeDefaultForMap(field) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - if field.has_default_value and field.default_value != []: - raise ValueError('Repeated field default value not empty list: %s' % ( - field.default_value)) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # We can't look at _concrete_class yet since it might not have - # been set. (Depends on order in which we initialize the classes). - message_type = field.message_type - def MakeRepeatedMessageDefault(message): - return containers.RepeatedCompositeFieldContainer( - message._listener_for_children, field.message_type) - return MakeRepeatedMessageDefault - else: - type_checker = type_checkers.GetTypeChecker(field) - def MakeRepeatedScalarDefault(message): - return containers.RepeatedScalarFieldContainer( - message._listener_for_children, type_checker) - return MakeRepeatedScalarDefault - - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # _concrete_class may not yet be initialized. - message_type = field.message_type - def MakeSubMessageDefault(message): - assert getattr(message_type, '_concrete_class', None), ( - 'Uninitialized concrete class found for field %r (message type %r)' - % (field.full_name, message_type.full_name)) - result = message_type._concrete_class() - result._SetListener( - _OneofListener(message, field) - if field.containing_oneof is not None - else message._listener_for_children) - return result - return MakeSubMessageDefault - - def MakeScalarDefault(message): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. 
- return field.default_value - return MakeScalarDefault - - -def _ReraiseTypeErrorWithFieldName(message_name, field_name): - """Re-raise the currently-handled TypeError with the field name added.""" - exc = sys.exc_info()[1] - if len(exc.args) == 1 and type(exc) is TypeError: - # simple TypeError; add field name to exception message - exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) - - # re-raise possibly-amended exception with original traceback: - raise exc.with_traceback(sys.exc_info()[2]) - - -def _AddInitMethod(message_descriptor, cls): - """Adds an __init__ method to cls.""" - - def _GetIntegerEnumValue(enum_type, value): - """Convert a string or integer enum value to an integer. - - If the value is a string, it is converted to the enum value in - enum_type with the same name. If the value is not a string, it's - returned as-is. (No conversion or bounds-checking is done.) - """ - if isinstance(value, str): - try: - return enum_type.values_by_name[value].number - except KeyError: - raise ValueError('Enum type %s: unknown label "%s"' % ( - enum_type.full_name, value)) - return value - - def init(self, **kwargs): - self._cached_byte_size = 0 - self._cached_byte_size_dirty = len(kwargs) > 0 - self._fields = {} - # Contains a mapping from oneof field descriptors to the descriptor - # of the currently set field in that oneof field. - self._oneofs = {} - - # _unknown_fields is () when empty for efficiency, and will be turned into - # a list if fields are added. - self._unknown_fields = () - # _unknown_field_set is None when empty for efficiency, and will be - # turned into UnknownFieldSet struct if fields are added. - self._unknown_field_set = None # pylint: disable=protected-access - self._is_present_in_parent = False - self._listener = message_listener_mod.NullMessageListener() - self._listener_for_children = _Listener(self) - for field_name, field_value in kwargs.items(): - field = _GetFieldByName(message_descriptor, field_name) - if field is None: - raise TypeError('%s() got an unexpected keyword argument "%s"' % - (message_descriptor.name, field_name)) - if field_value is None: - # field=None is the same as no field at all. 
- continue - if field.label == _FieldDescriptor.LABEL_REPEATED: - copy = field._default_constructor(self) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite - if _IsMapField(field): - if _IsMessageMapField(field): - for key in field_value: - copy[key].MergeFrom(field_value[key]) - else: - copy.update(field_value) - else: - for val in field_value: - if isinstance(val, dict): - copy.add(**val) - else: - copy.add().MergeFrom(val) - else: # Scalar - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = [_GetIntegerEnumValue(field.enum_type, val) - for val in field_value] - copy.extend(field_value) - self._fields[field] = copy - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - copy = field._default_constructor(self) - new_val = field_value - if isinstance(field_value, dict): - new_val = field.message_type._concrete_class(**field_value) - try: - copy.MergeFrom(new_val) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - self._fields[field] = copy - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = _GetIntegerEnumValue(field.enum_type, field_value) - try: - setattr(self, field_name, field_value) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - - init.__module__ = None - init.__doc__ = None - cls.__init__ = init - - -def _GetFieldByName(message_descriptor, field_name): - """Returns a field descriptor by field name. - - Args: - message_descriptor: A Descriptor describing all fields in message. - field_name: The name of the field to retrieve. - Returns: - The field descriptor associated with the field name. - """ - try: - return message_descriptor.fields_by_name[field_name] - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - -def _AddPropertiesForFields(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - for field in descriptor.fields: - _AddPropertiesForField(field, cls) - - if descriptor.is_extendable: - # _ExtensionDict is just an adaptor with no state so we allocate a new one - # every time it is accessed. - cls.Extensions = property(lambda self: _ExtensionDict(self)) - - -def _AddPropertiesForField(field, cls): - """Adds a public property for a protocol message field. - Clients can use this property to get and (in the case - of non-repeated scalar fields) directly set the value - of a protocol message field. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # Catch it if we add other types that we should - # handle specially here. - assert _FieldDescriptor.MAX_CPPTYPE == 10 - - constant_name = field.name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, field.number) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - _AddPropertiesForRepeatedField(field, cls) - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - _AddPropertiesForNonRepeatedCompositeField(field, cls) - else: - _AddPropertiesForNonRepeatedScalarField(field, cls) - - -class _FieldProperty(property): - __slots__ = ('DESCRIPTOR',) - - def __init__(self, descriptor, getter, setter, doc): - property.__init__(self, getter, setter, doc=doc) - self.DESCRIPTOR = descriptor - - -def _AddPropertiesForRepeatedField(field, cls): - """Adds a public property for a "repeated" protocol message field. 
Clients - can use this property to get the value of the field, which will be either a - RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see - below). - - Note that when clients add values to these containers, we perform - type-checking in the case of repeated scalar fields, and we also set any - necessary "has" bits as a side-effect. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to repeated field ' - '"%s" in protocol message object.' % proto_field_name) - - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedScalarField(field, cls): - """Adds a public property for a nonrepeated, scalar protocol message field. - Clients can use this property to get and directly set the value of the field. - Note that when the client sets the value of a field by using this property, - all necessary "has" bits are set as a side-effect, and we also perform - type-checking. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - type_checker = type_checkers.GetTypeChecker(field) - default_value = field.default_value - is_proto3 = field.containing_type.syntax == 'proto3' - - def getter(self): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. - return self._fields.get(field, default_value) - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - clear_when_set_to_default = is_proto3 and not field.containing_oneof - - def field_setter(self, new_value): - # pylint: disable=protected-access - # Testing the value for truthiness captures all of the proto3 defaults - # (0, 0.0, enum 0, and False). - try: - new_value = type_checker.CheckValue(new_value) - except TypeError as e: - raise TypeError( - 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) - if clear_when_set_to_default and not new_value: - self._fields.pop(field, None) - else: - self._fields[field] = new_value - # Check _cached_byte_size_dirty inline to improve performance, since scalar - # setters are called frequently. 
- if not self._cached_byte_size_dirty: - self._Modified() - - if field.containing_oneof: - def setter(self, new_value): - field_setter(self, new_value) - self._UpdateOneofState(field) - else: - setter = field_setter - - setter.__module__ = None - setter.__doc__ = 'Setter for %s.' % proto_field_name - - # Add a property to encapsulate the getter/setter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedCompositeField(field, cls): - """Adds a public property for a nonrepeated, composite protocol message field. - A composite field is a "group" or "message" field. - - Clients can use this property to get the value of the field, but cannot - assign to the property directly. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # TODO(robinson): Remove duplication with similar method - # for non-repeated scalars. - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to composite field ' - '"%s" in protocol message object.' % proto_field_name) - - # Add a property to encapsulate the getter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForExtensions(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - constant_name = extension_name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, extension_field.number) - - # TODO(amauryfa): Migrate all users of these attributes to functions like - # pool.FindExtensionByNumber(descriptor). - if descriptor.file is not None: - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. - pool = descriptor.file.pool - cls._extensions_by_number = pool._extensions_by_number[descriptor] - cls._extensions_by_name = pool._extensions_by_name[descriptor] - -def _AddStaticMethods(cls): - # TODO(robinson): This probably needs to be thread-safe(?) - def RegisterExtension(extension_handle): - extension_handle.containing_type = cls.DESCRIPTOR - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
- # pylint: disable=protected-access - cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) - _AttachFieldHelpers(cls, extension_handle) - cls.RegisterExtension = staticmethod(RegisterExtension) - - def FromString(s): - message = cls() - message.MergeFromString(s) - return message - cls.FromString = staticmethod(FromString) - - -def _IsPresent(item): - """Given a (FieldDescriptor, value) tuple from _fields, return true if the - value should be included in the list returned by ListFields().""" - - if item[0].label == _FieldDescriptor.LABEL_REPEATED: - return bool(item[1]) - elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - return item[1]._is_present_in_parent - else: - return True - - -def _AddListFieldsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ListFields(self): - all_fields = [item for item in self._fields.items() if _IsPresent(item)] - all_fields.sort(key = lambda item: item[0].number) - return all_fields - - cls.ListFields = ListFields - -_PROTO3_ERROR_TEMPLATE = \ - ('Protocol message %s has no non-repeated submessage field "%s" ' - 'nor marked as optional') -_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' - -def _AddHasFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - is_proto3 = (message_descriptor.syntax == "proto3") - error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE - - hassable_fields = {} - for field in message_descriptor.fields: - if field.label == _FieldDescriptor.LABEL_REPEATED: - continue - # For proto3, only submessages and fields inside a oneof have presence. - if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and - not field.containing_oneof): - continue - hassable_fields[field.name] = field - - # Has methods are supported for oneof descriptors. - for oneof in message_descriptor.oneofs: - hassable_fields[oneof.name] = oneof - - def HasField(self, field_name): - try: - field = hassable_fields[field_name] - except KeyError: - raise ValueError(error_msg % (message_descriptor.full_name, field_name)) - - if isinstance(field, descriptor_mod.OneofDescriptor): - try: - return HasField(self, self._oneofs[field].name) - except KeyError: - return False - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(field) - return value is not None and value._is_present_in_parent - else: - return field in self._fields - - cls.HasField = HasField - - -def _AddClearFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def ClearField(self, field_name): - try: - field = message_descriptor.fields_by_name[field_name] - except KeyError: - try: - field = message_descriptor.oneofs_by_name[field_name] - if field in self._oneofs: - field = self._oneofs[field] - else: - return - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - if field in self._fields: - # To match the C++ implementation, we need to invalidate iterators - # for map fields when ClearField() happens. - if hasattr(self._fields[field], 'InvalidateIterators'): - self._fields[field].InvalidateIterators() - - # Note: If the field is a sub-message, its listener will still point - # at us. That's fine, because the worst than can happen is that it - # will call _Modified() and invalidate our byte size. Big deal. 
- del self._fields[field] - - if self._oneofs.get(field.containing_oneof, None) is field: - del self._oneofs[field.containing_oneof] - - # Always call _Modified() -- even if nothing was changed, this is - # a mutating method, and thus calling it should cause the field to become - # present in the parent message. - self._Modified() - - cls.ClearField = ClearField - - -def _AddClearExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def ClearExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - - # Similar to ClearField(), above. - if extension_handle in self._fields: - del self._fields[extension_handle] - self._Modified() - cls.ClearExtension = ClearExtension - - -def _AddHasExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def HasExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: - raise KeyError('"%s" is repeated.' % extension_handle.full_name) - - if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(extension_handle) - return value is not None and value._is_present_in_parent - else: - return extension_handle in self._fields - cls.HasExtension = HasExtension - -def _InternalUnpackAny(msg): - """Unpacks Any message and returns the unpacked message. - - This internal method is different from public Any Unpack method which takes - the target message as argument. _InternalUnpackAny method does not have - target message type and need to find the message type in descriptor pool. - - Args: - msg: An Any message to be unpacked. - - Returns: - The unpacked message. - """ - # TODO(amauryfa): Don't use the factory of generated messages. - # To make Any work with custom factories, use the message factory of the - # parent message. - # pylint: disable=g-import-not-at-top - from google.protobuf import symbol_database - factory = symbol_database.Default() - - type_url = msg.type_url - - if not type_url: - return None - - # TODO(haberman): For now we just strip the hostname. Better logic will be - # required. - type_name = type_url.split('/')[-1] - descriptor = factory.pool.FindMessageTypeByName(type_name) - - if descriptor is None: - return None - - message_class = factory.GetPrototype(descriptor) - message = message_class() - - message.ParseFromString(msg.value) - return message - - -def _AddEqualsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __eq__(self, other): - if (not isinstance(other, message_mod.Message) or - other.DESCRIPTOR != self.DESCRIPTOR): - return False - - if self is other: - return True - - if self.DESCRIPTOR.full_name == _AnyFullTypeName: - any_a = _InternalUnpackAny(self) - any_b = _InternalUnpackAny(other) - if any_a and any_b: - return any_a == any_b - - if not self.ListFields() == other.ListFields(): - return False - - # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, - # then use it for the comparison. 
- unknown_fields = list(self._unknown_fields) - unknown_fields.sort() - other_unknown_fields = list(other._unknown_fields) - other_unknown_fields.sort() - return unknown_fields == other_unknown_fields - - cls.__eq__ = __eq__ - - -def _AddStrMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __str__(self): - return text_format.MessageToString(self) - cls.__str__ = __str__ - - -def _AddReprMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __repr__(self): - return text_format.MessageToString(self) - cls.__repr__ = __repr__ - - -def _AddUnicodeMethod(unused_message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def __unicode__(self): - return text_format.MessageToString(self, as_utf8=True).decode('utf-8') - cls.__unicode__ = __unicode__ - - -def _BytesForNonRepeatedElement(value, field_number, field_type): - """Returns the number of bytes needed to serialize a non-repeated element. - The returned byte count includes space for tag information and any - other additional space associated with serializing value. - - Args: - value: Value we're serializing. - field_number: Field number of this value. (Since the field number - is stored as part of a varint-encoded tag, this has an impact - on the total bytes required to serialize the value). - field_type: The type of the field. One of the TYPE_* constants - within FieldDescriptor. - """ - try: - fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] - return fn(field_number, value) - except KeyError: - raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) - - -def _AddByteSizeMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ByteSize(self): - if not self._cached_byte_size_dirty: - return self._cached_byte_size - - size = 0 - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - size = descriptor.fields_by_name['key']._sizer(self.key) - size += descriptor.fields_by_name['value']._sizer(self.value) - else: - for field_descriptor, field_value in self.ListFields(): - size += field_descriptor._sizer(field_value) - for tag_bytes, value_bytes in self._unknown_fields: - size += len(tag_bytes) + len(value_bytes) - - self._cached_byte_size = size - self._cached_byte_size_dirty = False - self._listener_for_children.dirty = False - return size - - cls.ByteSize = ByteSize - - -def _AddSerializeToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializeToString(self, **kwargs): - # Check if the message has all of its required fields set. 
- if not self.IsInitialized(): - raise message_mod.EncodeError( - 'Message %s is missing required fields: %s' % ( - self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) - return self.SerializePartialToString(**kwargs) - cls.SerializeToString = SerializeToString - - -def _AddSerializePartialToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializePartialToString(self, **kwargs): - out = BytesIO() - self._InternalSerialize(out.write, **kwargs) - return out.getvalue() - cls.SerializePartialToString = SerializePartialToString - - def InternalSerialize(self, write_bytes, deterministic=None): - if deterministic is None: - deterministic = ( - api_implementation.IsPythonDefaultSerializationDeterministic()) - else: - deterministic = bool(deterministic) - - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - descriptor.fields_by_name['key']._encoder( - write_bytes, self.key, deterministic) - descriptor.fields_by_name['value']._encoder( - write_bytes, self.value, deterministic) - else: - for field_descriptor, field_value in self.ListFields(): - field_descriptor._encoder(write_bytes, field_value, deterministic) - for tag_bytes, value_bytes in self._unknown_fields: - write_bytes(tag_bytes) - write_bytes(value_bytes) - cls._InternalSerialize = InternalSerialize - - -def _AddMergeFromStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def MergeFromString(self, serialized): - serialized = memoryview(serialized) - length = len(serialized) - try: - if self._InternalParse(serialized, 0, length) != length: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise message_mod.DecodeError('Unexpected end-group tag.') - except (IndexError, TypeError): - # Now ord(buf[p:p+1]) == ord('') gets TypeError. - raise message_mod.DecodeError('Truncated message.') - except struct.error as e: - raise message_mod.DecodeError(e) - return length # Return this for legacy reasons. - cls.MergeFromString = MergeFromString - - local_ReadTag = decoder.ReadTag - local_SkipField = decoder.SkipField - decoders_by_tag = cls._decoders_by_tag - - def InternalParse(self, buffer, pos, end): - """Create a message from serialized bytes. - - Args: - self: Message, instance of the proto message object. - buffer: memoryview of the serialized data. - pos: int, position to start in the serialized data. - end: int, end position of the serialized data. - - Returns: - Message object. - """ - # Guard against internal misuse, since this function is called internally - # quite extensively, and its easy to accidentally pass bytes. 
- assert isinstance(buffer, memoryview) - self._Modified() - field_dict = self._fields - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - while pos != end: - (tag_bytes, new_pos) = local_ReadTag(buffer, pos) - field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) - if field_decoder is None: - if not self._unknown_fields: # pylint: disable=protected-access - self._unknown_fields = [] # pylint: disable=protected-access - if unknown_field_set is None: - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - # pylint: disable=protected-access - (tag, _) = decoder._DecodeVarint(tag_bytes, 0) - field_number, wire_type = wire_format.UnpackTag(tag) - if field_number == 0: - raise message_mod.DecodeError('Field number 0 is illegal.') - # TODO(jieluo): remove old_pos. - old_pos = new_pos - (data, new_pos) = decoder._DecodeUnknownField( - buffer, new_pos, wire_type) # pylint: disable=protected-access - if new_pos == -1: - return pos - # pylint: disable=protected-access - unknown_field_set._add(field_number, wire_type, data) - # TODO(jieluo): remove _unknown_fields. - new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) - if new_pos == -1: - return pos - self._unknown_fields.append( - (tag_bytes, buffer[old_pos:new_pos].tobytes())) - pos = new_pos - else: - pos = field_decoder(buffer, new_pos, end, self, field_dict) - if field_desc: - self._UpdateOneofState(field_desc) - return pos - cls._InternalParse = InternalParse - - -def _AddIsInitializedMethod(message_descriptor, cls): - """Adds the IsInitialized and FindInitializationError methods to the - protocol message class.""" - - required_fields = [field for field in message_descriptor.fields - if field.label == _FieldDescriptor.LABEL_REQUIRED] - - def IsInitialized(self, errors=None): - """Checks if all required fields of a message are set. - - Args: - errors: A list which, if provided, will be populated with the field - paths of all missing required fields. - - Returns: - True iff the specified message has all required fields set. - """ - - # Performance is critical so we avoid HasField() and ListFields(). - - for field in required_fields: - if (field not in self._fields or - (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and - not self._fields[field]._is_present_in_parent)): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - for field, value in list(self._fields.items()): # dict can change size! - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.label == _FieldDescriptor.LABEL_REPEATED: - if (field.message_type.has_options and - field.message_type.GetOptions().map_entry): - continue - for element in value: - if not element.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - elif value._is_present_in_parent and not value.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - return True - - cls.IsInitialized = IsInitialized - - def FindInitializationErrors(self): - """Finds required fields which are not initialized. - - Returns: - A list of strings. Each string is a path to an uninitialized field from - the top-level message, e.g. "foo.bar[5].baz". 
- """ - - errors = [] # simplify things - - for field in required_fields: - if not self.HasField(field.name): - errors.append(field.name) - - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - name = '(%s)' % field.full_name - else: - name = field.name - - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - element = value[key] - prefix = '%s[%s].' % (name, key) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - # ScalarMaps can't have any initialization errors. - pass - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for i in range(len(value)): - element = value[i] - prefix = '%s[%d].' % (name, i) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - prefix = name + '.' - sub_errors = value.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - - return errors - - cls.FindInitializationErrors = FindInitializationErrors - - -def _FullyQualifiedClassName(klass): - module = klass.__module__ - name = getattr(klass, '__qualname__', klass.__name__) - if module in (None, 'builtins', '__builtin__'): - return name - return module + '.' + name - - -def _AddMergeFromMethod(cls): - LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED - CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE - - def MergeFrom(self, msg): - if not isinstance(msg, cls): - raise TypeError( - 'Parameter to MergeFrom() must be instance of same class: ' - 'expected %s got %s.' % (_FullyQualifiedClassName(cls), - _FullyQualifiedClassName(msg.__class__))) - - assert msg is not self - self._Modified() - - fields = self._fields - - for field, value in msg._fields.items(): - if field.label == LABEL_REPEATED: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - elif field.cpp_type == CPPTYPE_MESSAGE: - if value._is_present_in_parent: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - else: - self._fields[field] = value - if field.containing_oneof: - self._UpdateOneofState(field) - - if msg._unknown_fields: - if not self._unknown_fields: - self._unknown_fields = [] - self._unknown_fields.extend(msg._unknown_fields) - # pylint: disable=protected-access - if self._unknown_field_set is None: - self._unknown_field_set = containers.UnknownFieldSet() - self._unknown_field_set._extend(msg._unknown_field_set) - - cls.MergeFrom = MergeFrom - - -def _AddWhichOneofMethod(message_descriptor, cls): - def WhichOneof(self, oneof_name): - """Returns the name of the currently set field inside a oneof, or None.""" - try: - field = message_descriptor.oneofs_by_name[oneof_name] - except KeyError: - raise ValueError( - 'Protocol message has no oneof "%s" field.' % oneof_name) - - nested_field = self._oneofs.get(field, None) - if nested_field is not None and self.HasField(nested_field.name): - return nested_field.name - else: - return None - - cls.WhichOneof = WhichOneof - - -def _Clear(self): - # Clear fields. 
- self._fields = {} - self._unknown_fields = () - # pylint: disable=protected-access - if self._unknown_field_set is not None: - self._unknown_field_set._clear() - self._unknown_field_set = None - - self._oneofs = {} - self._Modified() - - -def _UnknownFields(self): - if self._unknown_field_set is None: # pylint: disable=protected-access - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - return self._unknown_field_set # pylint: disable=protected-access - - -def _DiscardUnknownFields(self): - self._unknown_fields = [] - self._unknown_field_set = None # pylint: disable=protected-access - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - value[key].DiscardUnknownFields() - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for sub_message in value: - sub_message.DiscardUnknownFields() - else: - value.DiscardUnknownFields() - - -def _SetListener(self, listener): - if listener is None: - self._listener = message_listener_mod.NullMessageListener() - else: - self._listener = listener - - -def _AddMessageMethods(message_descriptor, cls): - """Adds implementations of all Message methods to cls.""" - _AddListFieldsMethod(message_descriptor, cls) - _AddHasFieldMethod(message_descriptor, cls) - _AddClearFieldMethod(message_descriptor, cls) - if message_descriptor.is_extendable: - _AddClearExtensionMethod(cls) - _AddHasExtensionMethod(cls) - _AddEqualsMethod(message_descriptor, cls) - _AddStrMethod(message_descriptor, cls) - _AddReprMethod(message_descriptor, cls) - _AddUnicodeMethod(message_descriptor, cls) - _AddByteSizeMethod(message_descriptor, cls) - _AddSerializeToStringMethod(message_descriptor, cls) - _AddSerializePartialToStringMethod(message_descriptor, cls) - _AddMergeFromStringMethod(message_descriptor, cls) - _AddIsInitializedMethod(message_descriptor, cls) - _AddMergeFromMethod(cls) - _AddWhichOneofMethod(message_descriptor, cls) - # Adds methods which do not depend on cls. - cls.Clear = _Clear - cls.UnknownFields = _UnknownFields - cls.DiscardUnknownFields = _DiscardUnknownFields - cls._SetListener = _SetListener - - -def _AddPrivateHelperMethods(message_descriptor, cls): - """Adds implementation of private helper methods to cls.""" - - def Modified(self): - """Sets the _cached_byte_size_dirty bit to true, - and propagates this to our listener iff this was a state change. - """ - - # Note: Some callers check _cached_byte_size_dirty before calling - # _Modified() as an extra optimization. So, if this method is ever - # changed such that it does stuff even when _cached_byte_size_dirty is - # already true, the callers need to be updated. - if not self._cached_byte_size_dirty: - self._cached_byte_size_dirty = True - self._listener_for_children.dirty = True - self._is_present_in_parent = True - self._listener.Modified() - - def _UpdateOneofState(self, field): - """Sets field as the active field in its containing oneof. - - Will also delete currently active field in the oneof, if it is different - from the argument. Does not mark the message as modified. 
- """ - other_field = self._oneofs.setdefault(field.containing_oneof, field) - if other_field is not field: - del self._fields[other_field] - self._oneofs[field.containing_oneof] = field - - cls._Modified = Modified - cls.SetInParent = Modified - cls._UpdateOneofState = _UpdateOneofState - - -class _Listener(object): - - """MessageListener implementation that a parent message registers with its - child message. - - In order to support semantics like: - - foo.bar.baz.qux = 23 - assert foo.HasField('bar') - - ...child objects must have back references to their parents. - This helper class is at the heart of this support. - """ - - def __init__(self, parent_message): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - """ - # This listener establishes a back reference from a child (contained) object - # to its parent (containing) object. We make this a weak reference to avoid - # creating cyclic garbage when the client finishes with the 'parent' object - # in the tree. - if isinstance(parent_message, weakref.ProxyType): - self._parent_message_weakref = parent_message - else: - self._parent_message_weakref = weakref.proxy(parent_message) - - # As an optimization, we also indicate directly on the listener whether - # or not the parent message is dirty. This way we can avoid traversing - # up the tree in the common case. - self.dirty = False - - def Modified(self): - if self.dirty: - return - try: - # Propagate the signal to our parents iff this is the first field set. - self._parent_message_weakref._Modified() - except ReferenceError: - # We can get here if a client has kept a reference to a child object, - # and is now setting a field on it, but the child's parent has been - # garbage-collected. This is not an error. - pass - - -class _OneofListener(_Listener): - """Special listener implementation for setting composite oneof fields.""" - - def __init__(self, parent_message, field): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - field: The descriptor of the field being set in the parent message. - """ - super(_OneofListener, self).__init__(parent_message) - self._field = field - - def Modified(self): - """Also updates the state of the containing oneof in the parent message.""" - try: - self._parent_message_weakref._UpdateOneofState(self._field) - super(_OneofListener, self).Modified() - except ReferenceError: - pass diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/type_checkers.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/type_checkers.py deleted file mode 100644 index a53e71fe8e..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/type_checkers.py +++ /dev/null @@ -1,435 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. 
-# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides type checking routines. - -This module defines type checking utilities in the forms of dictionaries: - -VALUE_CHECKERS: A dictionary of field types and a value validation object. -TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing - function. -TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization - function. -FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their - corresponding wire types. -TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization - function. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import ctypes -import numbers - -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import wire_format -from google.protobuf import descriptor - -_FieldDescriptor = descriptor.FieldDescriptor - - -def TruncateToFourByteFloat(original): - return ctypes.c_float(original).value - - -def ToShortestFloat(original): - """Returns the shortest float that has same value in wire.""" - # All 4 byte floats have between 6 and 9 significant digits, so we - # start with 6 as the lower bound. - # It has to be iterative because use '.9g' directly can not get rid - # of the noises for most values. For example if set a float_field=0.9 - # use '.9g' will print 0.899999976. - precision = 6 - rounded = float('{0:.{1}g}'.format(original, precision)) - while TruncateToFourByteFloat(rounded) != original: - precision += 1 - rounded = float('{0:.{1}g}'.format(original, precision)) - return rounded - - -def SupportsOpenEnums(field_descriptor): - return field_descriptor.containing_type.syntax == 'proto3' - - -def GetTypeChecker(field): - """Returns a type checker for a message field of the specified types. - - Args: - field: FieldDescriptor object for this field. - - Returns: - An instance of TypeChecker which can be used to verify the types - of values assigned to a field of the specified type. - """ - if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and - field.type == _FieldDescriptor.TYPE_STRING): - return UnicodeValueChecker() - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - if SupportsOpenEnums(field): - # When open enums are supported, any int32 can be assigned. - return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] - else: - return EnumValueChecker(field.enum_type) - return _VALUE_CHECKERS[field.cpp_type] - - -# None of the typecheckers below make any attempt to guard against people -# subclassing builtin types and doing weird things. 
We're not trying to -# protect against malicious clients here, just people accidentally shooting -# themselves in the foot in obvious ways. -class TypeChecker(object): - - """Type checker used to catch type errors as early as possible - when the client is setting scalar fields in protocol messages. - """ - - def __init__(self, *acceptable_types): - self._acceptable_types = acceptable_types - - def CheckValue(self, proposed_value): - """Type check the provided value and return it. - - The returned value might have been normalized to another type. - """ - if not isinstance(proposed_value, self._acceptable_types): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), self._acceptable_types)) - raise TypeError(message) - return proposed_value - - -class TypeCheckerWithDefault(TypeChecker): - - def __init__(self, default_value, *acceptable_types): - TypeChecker.__init__(self, *acceptable_types) - self._default_value = default_value - - def DefaultValue(self): - return self._default_value - - -class BoolValueChecker(object): - """Type checker used for bool fields.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bool, int))) - raise TypeError(message) - return bool(proposed_value) - - def DefaultValue(self): - return False - - -# IntValueChecker and its subclasses perform integer type-checks -# and bounds-checks. -class IntValueChecker(object): - - """Checker used for integer fields. Performs type-check and range check.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - - if not self._MIN <= int(proposed_value) <= self._MAX: - raise ValueError('Value out of range: %d' % proposed_value) - # We force all values to int to make alternate implementations where the - # distinction is more significant (e.g. the C++ implementation) simpler. - proposed_value = int(proposed_value) - return proposed_value - - def DefaultValue(self): - return 0 - - -class EnumValueChecker(object): - - """Checker used for enum fields. Performs type-check and range check.""" - - def __init__(self, enum_type): - self._enum_type = enum_type - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, numbers.Integral): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - if int(proposed_value) not in self._enum_type.values_by_number: - raise ValueError('Unknown enum value: %d' % proposed_value) - return proposed_value - - def DefaultValue(self): - return self._enum_type.values[0].number - - -class UnicodeValueChecker(object): - - """Checker used for string fields. - - Always returns a unicode value, even if the input is of type str. - """ - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, (bytes, str)): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bytes, str))) - raise TypeError(message) - - # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
- if isinstance(proposed_value, bytes): - try: - proposed_value = proposed_value.decode('utf-8') - except UnicodeDecodeError: - raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' - 'encoding. Non-UTF-8 strings must be converted to ' - 'unicode objects before being added.' % - (proposed_value)) - else: - try: - proposed_value.encode('utf8') - except UnicodeEncodeError: - raise ValueError('%.1024r isn\'t a valid unicode string and ' - 'can\'t be encoded in UTF-8.'% - (proposed_value)) - - return proposed_value - - def DefaultValue(self): - return u"" - - -class Int32ValueChecker(IntValueChecker): - # We're sure to use ints instead of longs here since comparison may be more - # efficient. - _MIN = -2147483648 - _MAX = 2147483647 - - -class Uint32ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 32) - 1 - - -class Int64ValueChecker(IntValueChecker): - _MIN = -(1 << 63) - _MAX = (1 << 63) - 1 - - -class Uint64ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 64) - 1 - - -# The max 4 bytes float is about 3.4028234663852886e+38 -_FLOAT_MAX = float.fromhex('0x1.fffffep+127') -_FLOAT_MIN = -_FLOAT_MAX -_INF = float('inf') -_NEG_INF = float('-inf') - - -class DoubleValueChecker(object): - """Checker used for double fields. - - Performs type-check and range check. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - if (not hasattr(proposed_value, '__float__') and - not hasattr(proposed_value, '__index__')) or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: int, float' % - (proposed_value, type(proposed_value))) - raise TypeError(message) - return float(proposed_value) - - def DefaultValue(self): - return 0.0 - - -class FloatValueChecker(DoubleValueChecker): - """Checker used for float fields. - - Performs type-check and range check. - - Values exceeding a 32-bit float will be converted to inf/-inf. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - converted_value = super().CheckValue(proposed_value) - # This inf rounding matches the C++ proto SafeDoubleToFloat logic. - if converted_value > _FLOAT_MAX: - return _INF - if converted_value < _FLOAT_MIN: - return _NEG_INF - - return TruncateToFourByteFloat(converted_value) - -# Type-checkers for all scalar CPPTYPEs. -_VALUE_CHECKERS = { - _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), - _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), - _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), - _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), - _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), - _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), -} - - -# Map from field type to a function F, such that F(field_num, value) -# gives the total byte size for a value of the given type. This -# byte size includes tag information and any other additional space -# associated with serializing "value". 
-TYPE_TO_BYTE_SIZE_FN = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, - _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, - _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, - _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, - _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, - _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, - _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, - _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, - _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, - _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, - _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, - _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, - _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, - _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, - _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, - _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, - _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, - _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize - } - - -# Maps from field types to encoder constructors. -TYPE_TO_ENCODER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, - _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, - _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, - _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, - _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, - _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, - _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, - _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, - } - - -# Maps from field types to sizer constructors. -TYPE_TO_SIZER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, - _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, - _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, - _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, - _FieldDescriptor.TYPE_STRING: encoder.StringSizer, - _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, - _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, - _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, - } - - -# Maps from field type to a decoder constructor. 
-TYPE_TO_DECODER = { - _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, - _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, - _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, - _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, - _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, - _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, - _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, - _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, - _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, - _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, - _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, - _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, - _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, - _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, - _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, - _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, - _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, - _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, - } - -# Maps from field type to expected wiretype. -FIELD_TYPE_TO_WIRE_TYPE = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_STRING: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, - _FieldDescriptor.TYPE_MESSAGE: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_BYTES: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, - } diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/well_known_types.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/well_known_types.py deleted file mode 100644 index b581ab750a..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/well_known_types.py +++ /dev/null @@ -1,878 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
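An aside on the tables removed above: they map every scalar protobuf field type to its value checker, byte-size function, encoder, sizer, decoder and wire type. A minimal self-contained sketch of the two ideas those tables encode, range-checked scalars and the 3-bit wire-type tag; the helper names below are our own, not the library's:

```python
# Illustrative sketch only -- not the vendored implementation.
TAG_TYPE_BITS = 3                 # low 3 bits of a field tag hold the wire type
WIRETYPE_VARINT = 0
WIRETYPE_FIXED32 = 5

INT32_MIN, INT32_MAX = -(1 << 31), (1 << 31) - 1


def check_int32(value):
    """Type- and range-check an int32 the way a scalar value checker does."""
    if isinstance(value, bool) or not isinstance(value, int):
        raise TypeError("%r has type %s, expected int" % (value, type(value)))
    if not INT32_MIN <= value <= INT32_MAX:
        raise ValueError("%d is out of range for int32" % value)
    return value


def pack_tag(field_number, wire_type):
    """Combine a field number and wire type into the on-wire tag value."""
    return (field_number << TAG_TYPE_BITS) | wire_type


assert check_int32(2147483647) == 2147483647
assert pack_tag(1, WIRETYPE_VARINT) == 0x08      # field 1 encoded as a varint
assert pack_tag(2, WIRETYPE_FIXED32) == 0x15     # field 2 encoded as fixed32
```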
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains well known classes. - -This files defines well known classes which need extra maintenance including: - - Any - - Duration - - FieldMask - - Struct - - Timestamp -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - -import calendar -import collections.abc -import datetime - -from google.protobuf.descriptor import FieldDescriptor - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_NANOS_PER_SECOND = 1000000000 -_NANOS_PER_MILLISECOND = 1000000 -_NANOS_PER_MICROSECOND = 1000 -_MILLIS_PER_SECOND = 1000 -_MICROS_PER_SECOND = 1000000 -_SECONDS_PER_DAY = 24 * 3600 -_DURATION_SECONDS_MAX = 315576000000 - - -class Any(object): - """Class for Any Message type.""" - - __slots__ = () - - def Pack(self, msg, type_url_prefix='type.googleapis.com/', - deterministic=None): - """Packs the specified message into current Any message.""" - if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': - self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - else: - self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - self.value = msg.SerializeToString(deterministic=deterministic) - - def Unpack(self, msg): - """Unpacks the current Any message into specified message.""" - descriptor = msg.DESCRIPTOR - if not self.Is(descriptor): - return False - msg.ParseFromString(self.value) - return True - - def TypeName(self): - """Returns the protobuf type name of the inner message.""" - # Only last part is to be used: b/25630112 - return self.type_url.split('/')[-1] - - def Is(self, descriptor): - """Checks if this Any represents the given protobuf type.""" - return '/' in self.type_url and self.TypeName() == descriptor.full_name - - -_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) -_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( - 0, tz=datetime.timezone.utc) - - -class Timestamp(object): - """Class for Timestamp message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Timestamp to RFC 3339 date string format. - - Returns: - A string converted from timestamp. The string is always Z-normalized - and uses 3, 6 or 9 fractional digits as required to represent the - exact time. Example of the return format: '1972-01-01T10:00:20.021Z' - """ - nanos = self.nanos % _NANOS_PER_SECOND - total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND - seconds = total_sec % _SECONDS_PER_DAY - days = (total_sec - seconds) // _SECONDS_PER_DAY - dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) - - result = dt.isoformat() - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 'Z' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. 
- return result + '.%03dZ' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06dZ' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09dZ' % nanos - - def FromJsonString(self, value): - """Parse a RFC 3339 date string format to Timestamp. - - Args: - value: A date string. Any fractional digits (or none) and any offset are - accepted as long as they fit into nano-seconds precision. - Example of accepted format: '1972-01-01T10:00:20.021-05:00' - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) - timezone_offset = value.find('Z') - if timezone_offset == -1: - timezone_offset = value.find('+') - if timezone_offset == -1: - timezone_offset = value.rfind('-') - if timezone_offset == -1: - raise ValueError( - 'Failed to parse timestamp: missing valid timezone offset.') - time_value = value[0:timezone_offset] - # Parse datetime and nanos. - point_position = time_value.find('.') - if point_position == -1: - second_value = time_value - nano_value = '' - else: - second_value = time_value[:point_position] - nano_value = time_value[point_position + 1:] - if 't' in second_value: - raise ValueError( - 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' - 'lowercase \'t\' is not accepted'.format(second_value)) - date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) - td = date_object - datetime.datetime(1970, 1, 1) - seconds = td.seconds + td.days * _SECONDS_PER_DAY - if len(nano_value) > 9: - raise ValueError( - 'Failed to parse Timestamp: nanos {0} more than ' - '9 fractional digits.'.format(nano_value)) - if nano_value: - nanos = round(float('0.' + nano_value) * 1e9) - else: - nanos = 0 - # Parse timezone offsets. 
- if value[timezone_offset] == 'Z': - if len(value) != timezone_offset + 1: - raise ValueError('Failed to parse timestamp: invalid trailing' - ' data {0}.'.format(value)) - else: - timezone = value[timezone_offset:] - pos = timezone.find(':') - if pos == -1: - raise ValueError( - 'Invalid timezone offset value: {0}.'.format(timezone)) - if timezone[0] == '+': - seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - else: - seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - # Set seconds and nanos - self.seconds = int(seconds) - self.nanos = int(nanos) - - def GetCurrentTime(self): - """Get the current UTC into Timestamp.""" - self.FromDatetime(datetime.datetime.utcnow()) - - def ToNanoseconds(self): - """Converts Timestamp to nanoseconds since epoch.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts Timestamp to microseconds since epoch.""" - return (self.seconds * _MICROS_PER_SECOND + - self.nanos // _NANOS_PER_MICROSECOND) - - def ToMilliseconds(self): - """Converts Timestamp to milliseconds since epoch.""" - return (self.seconds * _MILLIS_PER_SECOND + - self.nanos // _NANOS_PER_MILLISECOND) - - def ToSeconds(self): - """Converts Timestamp to seconds since epoch.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds since epoch to Timestamp.""" - self.seconds = nanos // _NANOS_PER_SECOND - self.nanos = nanos % _NANOS_PER_SECOND - - def FromMicroseconds(self, micros): - """Converts microseconds since epoch to Timestamp.""" - self.seconds = micros // _MICROS_PER_SECOND - self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND - - def FromMilliseconds(self, millis): - """Converts milliseconds since epoch to Timestamp.""" - self.seconds = millis // _MILLIS_PER_SECOND - self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND - - def FromSeconds(self, seconds): - """Converts seconds since epoch to Timestamp.""" - self.seconds = seconds - self.nanos = 0 - - def ToDatetime(self, tzinfo=None): - """Converts Timestamp to a datetime. - - Args: - tzinfo: A datetime.tzinfo subclass; defaults to None. - - Returns: - If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone - information, i.e. not aware that it's UTC). - - Otherwise, returns a timezone-aware datetime in the input timezone. - """ - delta = datetime.timedelta( - seconds=self.seconds, - microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) - if tzinfo is None: - return _EPOCH_DATETIME_NAIVE + delta - else: - return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta - - def FromDatetime(self, dt): - """Converts datetime to Timestamp. - - Args: - dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. - """ - # Using this guide: http://wiki.python.org/moin/WorkingWithTime - # And this conversion guide: http://docs.python.org/library/time.html - - # Turn the date parameter into a tuple (struct_time) that can then be - # manipulated into a long value of seconds. During the conversion from - # struct_time to long, the source date in UTC, and so it follows that the - # correct transformation is calendar.timegm() - self.seconds = calendar.timegm(dt.utctimetuple()) - self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND - - -class Duration(object): - """Class for Duration message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Duration to string format. - - Returns: - A string converted from self. 
The string format will contains - 3, 6, or 9 fractional digits depending on the precision required to - represent the exact Duration value. For example: "1s", "1.010s", - "1.000000100s", "-3.100s" - """ - _CheckDurationValid(self.seconds, self.nanos) - if self.seconds < 0 or self.nanos < 0: - result = '-' - seconds = - self.seconds + int((0 - self.nanos) // 1e9) - nanos = (0 - self.nanos) % 1e9 - else: - result = '' - seconds = self.seconds + int(self.nanos // 1e9) - nanos = self.nanos % 1e9 - result += '%d' % seconds - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 's' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. - return result + '.%03ds' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06ds' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09ds' % nanos - - def FromJsonString(self, value): - """Converts a string to Duration. - - Args: - value: A string to be converted. The string must end with 's'. Any - fractional digits (or none) are accepted as long as they fit into - precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Duration JSON value not a string: {!r}'.format(value)) - if len(value) < 1 or value[-1] != 's': - raise ValueError( - 'Duration must end with letter "s": {0}.'.format(value)) - try: - pos = value.find('.') - if pos == -1: - seconds = int(value[:-1]) - nanos = 0 - else: - seconds = int(value[:pos]) - if value[0] == '-': - nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) - else: - nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) - _CheckDurationValid(seconds, nanos) - self.seconds = seconds - self.nanos = nanos - except ValueError as e: - raise ValueError( - 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) - - def ToNanoseconds(self): - """Converts a Duration to nanoseconds.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts a Duration to microseconds.""" - micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) - return self.seconds * _MICROS_PER_SECOND + micros - - def ToMilliseconds(self): - """Converts a Duration to milliseconds.""" - millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) - return self.seconds * _MILLIS_PER_SECOND + millis - - def ToSeconds(self): - """Converts a Duration to seconds.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds to Duration.""" - self._NormalizeDuration(nanos // _NANOS_PER_SECOND, - nanos % _NANOS_PER_SECOND) - - def FromMicroseconds(self, micros): - """Converts microseconds to Duration.""" - self._NormalizeDuration( - micros // _MICROS_PER_SECOND, - (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) - - def FromMilliseconds(self, millis): - """Converts milliseconds to Duration.""" - self._NormalizeDuration( - millis // _MILLIS_PER_SECOND, - (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) - - def FromSeconds(self, seconds): - """Converts seconds to Duration.""" - self.seconds = seconds - self.nanos = 0 - - def ToTimedelta(self): - """Converts Duration to timedelta.""" - return datetime.timedelta( - seconds=self.seconds, microseconds=_RoundTowardZero( - self.nanos, _NANOS_PER_MICROSECOND)) - - def FromTimedelta(self, td): - """Converts timedelta to Duration.""" - 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, - td.microseconds * _NANOS_PER_MICROSECOND) - - def _NormalizeDuration(self, seconds, nanos): - """Set Duration by seconds and nanos.""" - # Force nanos to be negative if the duration is negative. - if seconds < 0 and nanos > 0: - seconds += 1 - nanos -= _NANOS_PER_SECOND - self.seconds = seconds - self.nanos = nanos - - -def _CheckDurationValid(seconds, nanos): - if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: - raise ValueError( - 'Duration is not valid: Seconds {0} must be in range ' - '[-315576000000, 315576000000].'.format(seconds)) - if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: - raise ValueError( - 'Duration is not valid: Nanos {0} must be in range ' - '[-999999999, 999999999].'.format(nanos)) - if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): - raise ValueError( - 'Duration is not valid: Sign mismatch.') - - -def _RoundTowardZero(value, divider): - """Truncates the remainder part after division.""" - # For some languages, the sign of the remainder is implementation - # dependent if any of the operands is negative. Here we enforce - # "rounded toward zero" semantics. For example, for (-5) / 2 an - # implementation may give -3 as the result with the remainder being - # 1. This function ensures we always return -2 (closer to zero). - result = value // divider - remainder = value % divider - if result < 0 and remainder > 0: - return result + 1 - else: - return result - - -class FieldMask(object): - """Class for FieldMask message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts FieldMask to string according to proto3 JSON spec.""" - camelcase_paths = [] - for path in self.paths: - camelcase_paths.append(_SnakeCaseToCamelCase(path)) - return ','.join(camelcase_paths) - - def FromJsonString(self, value): - """Converts string to FieldMask according to proto3 JSON spec.""" - if not isinstance(value, str): - raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) - self.Clear() - if value: - for path in value.split(','): - self.paths.append(_CamelCaseToSnakeCase(path)) - - def IsValidForDescriptor(self, message_descriptor): - """Checks whether the FieldMask is valid for Message Descriptor.""" - for path in self.paths: - if not _IsValidPath(message_descriptor, path): - return False - return True - - def AllFieldsFromDescriptor(self, message_descriptor): - """Gets all direct fields of Message Descriptor to FieldMask.""" - self.Clear() - for field in message_descriptor.fields: - self.paths.append(field.name) - - def CanonicalFormFromMask(self, mask): - """Converts a FieldMask to the canonical form. - - Removes paths that are covered by another path. For example, - "foo.bar" is covered by "foo" and will be removed if "foo" - is also in the FieldMask. Then sorts all paths in alphabetical order. - - Args: - mask: The original FieldMask to be converted. 
- """ - tree = _FieldMaskTree(mask) - tree.ToFieldMask(self) - - def Union(self, mask1, mask2): - """Merges mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - tree.MergeFromFieldMask(mask2) - tree.ToFieldMask(self) - - def Intersect(self, mask1, mask2): - """Intersects mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - intersection = _FieldMaskTree() - for path in mask2.paths: - tree.IntersectPath(path, intersection) - intersection.ToFieldMask(self) - - def MergeMessage( - self, source, destination, - replace_message_field=False, replace_repeated_field=False): - """Merges fields specified in FieldMask from source to destination. - - Args: - source: Source message. - destination: The destination message to be merged into. - replace_message_field: Replace message field if True. Merge message - field if False. - replace_repeated_field: Replace repeated field if True. Append - elements of repeated field if False. - """ - tree = _FieldMaskTree(self) - tree.MergeMessage( - source, destination, replace_message_field, replace_repeated_field) - - -def _IsValidPath(message_descriptor, path): - """Checks whether the path is valid for Message Descriptor.""" - parts = path.split('.') - last = parts.pop() - for name in parts: - field = message_descriptor.fields_by_name.get(name) - if (field is None or - field.label == FieldDescriptor.LABEL_REPEATED or - field.type != FieldDescriptor.TYPE_MESSAGE): - return False - message_descriptor = field.message_type - return last in message_descriptor.fields_by_name - - -def _CheckFieldMaskMessage(message): - """Raises ValueError if message is not a FieldMask.""" - message_descriptor = message.DESCRIPTOR - if (message_descriptor.name != 'FieldMask' or - message_descriptor.file.name != 'google/protobuf/field_mask.proto'): - raise ValueError('Message {0} is not a FieldMask.'.format( - message_descriptor.full_name)) - - -def _SnakeCaseToCamelCase(path_name): - """Converts a path name from snake_case to camelCase.""" - result = [] - after_underscore = False - for c in path_name: - if c.isupper(): - raise ValueError( - 'Fail to print FieldMask to Json string: Path name ' - '{0} must not contain uppercase letters.'.format(path_name)) - if after_underscore: - if c.islower(): - result.append(c.upper()) - after_underscore = False - else: - raise ValueError( - 'Fail to print FieldMask to Json string: The ' - 'character after a "_" must be a lowercase letter ' - 'in path name {0}.'.format(path_name)) - elif c == '_': - after_underscore = True - else: - result += c - - if after_underscore: - raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' - 'in path name {0}.'.format(path_name)) - return ''.join(result) - - -def _CamelCaseToSnakeCase(path_name): - """Converts a field name from camelCase to snake_case.""" - result = [] - for c in path_name: - if c == '_': - raise ValueError('Fail to parse FieldMask: Path name ' - '{0} must not contain "_"s.'.format(path_name)) - if c.isupper(): - result += '_' - result += c.lower() - else: - result += c - return ''.join(result) - - -class _FieldMaskTree(object): - """Represents a FieldMask in a tree structure. - - For example, given a FieldMask "foo.bar,foo.baz,bar.baz", - the FieldMaskTree will be: - [_root] -+- foo -+- bar - | | - | +- baz - | - +- bar --- baz - In the tree, each leaf node represents a field path. 
- """ - - __slots__ = ('_root',) - - def __init__(self, field_mask=None): - """Initializes the tree by FieldMask.""" - self._root = {} - if field_mask: - self.MergeFromFieldMask(field_mask) - - def MergeFromFieldMask(self, field_mask): - """Merges a FieldMask to the tree.""" - for path in field_mask.paths: - self.AddPath(path) - - def AddPath(self, path): - """Adds a field path into the tree. - - If the field path to add is a sub-path of an existing field path - in the tree (i.e., a leaf node), it means the tree already matches - the given path so nothing will be added to the tree. If the path - matches an existing non-leaf node in the tree, that non-leaf node - will be turned into a leaf node with all its children removed because - the path matches all the node's children. Otherwise, a new path will - be added. - - Args: - path: The field path to add. - """ - node = self._root - for name in path.split('.'): - if name not in node: - node[name] = {} - elif not node[name]: - # Pre-existing empty node implies we already have this entire tree. - return - node = node[name] - # Remove any sub-trees we might have had. - node.clear() - - def ToFieldMask(self, field_mask): - """Converts the tree to a FieldMask.""" - field_mask.Clear() - _AddFieldPaths(self._root, '', field_mask) - - def IntersectPath(self, path, intersection): - """Calculates the intersection part of a field path with this tree. - - Args: - path: The field path to calculates. - intersection: The out tree to record the intersection part. - """ - node = self._root - for name in path.split('.'): - if name not in node: - return - elif not node[name]: - intersection.AddPath(path) - return - node = node[name] - intersection.AddLeafNodes(path, node) - - def AddLeafNodes(self, prefix, node): - """Adds leaf nodes begin with prefix to this tree.""" - if not node: - self.AddPath(prefix) - for name in node: - child_path = prefix + '.' + name - self.AddLeafNodes(child_path, node[name]) - - def MergeMessage( - self, source, destination, - replace_message, replace_repeated): - """Merge all fields specified by this tree from source to destination.""" - _MergeMessage( - self._root, source, destination, replace_message, replace_repeated) - - -def _StrConvert(value): - """Converts value to str if it is not.""" - # This file is imported by c extension and some methods like ClearField - # requires string for the field name. py2/py3 has different text - # type and may use unicode. - if not isinstance(value, str): - return value.encode('utf-8') - return value - - -def _MergeMessage( - node, source, destination, replace_message, replace_repeated): - """Merge all fields specified by a sub-tree from source to destination.""" - source_descriptor = source.DESCRIPTOR - for name in node: - child = node[name] - field = source_descriptor.fields_by_name[name] - if field is None: - raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( - name, source_descriptor.full_name)) - if child: - # Sub-paths are only allowed for singular message fields. 
- if (field.label == FieldDescriptor.LABEL_REPEATED or - field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): - raise ValueError('Error: Field {0} in message {1} is not a singular ' - 'message field and cannot have sub-fields.'.format( - name, source_descriptor.full_name)) - if source.HasField(name): - _MergeMessage( - child, getattr(source, name), getattr(destination, name), - replace_message, replace_repeated) - continue - if field.label == FieldDescriptor.LABEL_REPEATED: - if replace_repeated: - destination.ClearField(_StrConvert(name)) - repeated_source = getattr(source, name) - repeated_destination = getattr(destination, name) - repeated_destination.MergeFrom(repeated_source) - else: - if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - if replace_message: - destination.ClearField(_StrConvert(name)) - if source.HasField(name): - getattr(destination, name).MergeFrom(getattr(source, name)) - else: - setattr(destination, name, getattr(source, name)) - - -def _AddFieldPaths(node, prefix, field_mask): - """Adds the field paths descended from node to field_mask.""" - if not node and prefix: - field_mask.paths.append(prefix) - return - for name in sorted(node): - if prefix: - child_path = prefix + '.' + name - else: - child_path = name - _AddFieldPaths(node[name], child_path, field_mask) - - -def _SetStructValue(struct_value, value): - if value is None: - struct_value.null_value = 0 - elif isinstance(value, bool): - # Note: this check must come before the number check because in Python - # True and False are also considered numbers. - struct_value.bool_value = value - elif isinstance(value, str): - struct_value.string_value = value - elif isinstance(value, (int, float)): - struct_value.number_value = value - elif isinstance(value, (dict, Struct)): - struct_value.struct_value.Clear() - struct_value.struct_value.update(value) - elif isinstance(value, (list, ListValue)): - struct_value.list_value.Clear() - struct_value.list_value.extend(value) - else: - raise ValueError('Unexpected type') - - -def _GetStructValue(struct_value): - which = struct_value.WhichOneof('kind') - if which == 'struct_value': - return struct_value.struct_value - elif which == 'null_value': - return None - elif which == 'number_value': - return struct_value.number_value - elif which == 'string_value': - return struct_value.string_value - elif which == 'bool_value': - return struct_value.bool_value - elif which == 'list_value': - return struct_value.list_value - elif which is None: - raise ValueError('Value not set') - - -class Struct(object): - """Class for Struct message type.""" - - __slots__ = () - - def __getitem__(self, key): - return _GetStructValue(self.fields[key]) - - def __contains__(self, item): - return item in self.fields - - def __setitem__(self, key, value): - _SetStructValue(self.fields[key], value) - - def __delitem__(self, key): - del self.fields[key] - - def __len__(self): - return len(self.fields) - - def __iter__(self): - return iter(self.fields) - - def keys(self): # pylint: disable=invalid-name - return self.fields.keys() - - def values(self): # pylint: disable=invalid-name - return [self[key] for key in self] - - def items(self): # pylint: disable=invalid-name - return [(key, self[key]) for key in self] - - def get_or_create_list(self, key): - """Returns a list for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('list_value'): - # Clear will mark list_value modified which will indeed create a list. 
- self.fields[key].list_value.Clear() - return self.fields[key].list_value - - def get_or_create_struct(self, key): - """Returns a struct for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('struct_value'): - # Clear will mark struct_value modified which will indeed create a struct. - self.fields[key].struct_value.Clear() - return self.fields[key].struct_value - - def update(self, dictionary): # pylint: disable=invalid-name - for key, value in dictionary.items(): - _SetStructValue(self.fields[key], value) - -collections.abc.MutableMapping.register(Struct) - - -class ListValue(object): - """Class for ListValue message type.""" - - __slots__ = () - - def __len__(self): - return len(self.values) - - def append(self, value): - _SetStructValue(self.values.add(), value) - - def extend(self, elem_seq): - for value in elem_seq: - self.append(value) - - def __getitem__(self, index): - """Retrieves item by the specified index.""" - return _GetStructValue(self.values.__getitem__(index)) - - def __setitem__(self, index, value): - _SetStructValue(self.values.__getitem__(index), value) - - def __delitem__(self, key): - del self.values[key] - - def items(self): - for i in range(len(self)): - yield self[i] - - def add_struct(self): - """Appends and returns a struct value as the next value in the list.""" - struct_value = self.values.add().struct_value - # Clear will mark struct_value modified which will indeed create a struct. - struct_value.Clear() - return struct_value - - def add_list(self): - """Appends and returns a list value as the next value in the list.""" - list_value = self.values.add().list_value - # Clear will mark list_value modified which will indeed create a list. - list_value.Clear() - return list_value - -collections.abc.MutableSequence.register(ListValue) - - -WKTBASES = { - 'google.protobuf.Any': Any, - 'google.protobuf.Duration': Duration, - 'google.protobuf.FieldMask': FieldMask, - 'google.protobuf.ListValue': ListValue, - 'google.protobuf.Struct': Struct, - 'google.protobuf.Timestamp': Timestamp, -} diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/wire_format.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/wire_format.py deleted file mode 100644 index 883f525585..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/internal/wire_format.py +++ /dev/null @@ -1,268 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
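An aside on the well-known-type helpers removed above: the Timestamp, Duration, Struct and FieldMask behaviour they implement is the same behaviour exposed by the regular `protobuf` package. A short usage sketch, assuming only `pip install protobuf`:

```python
import datetime

from google.protobuf.duration_pb2 import Duration
from google.protobuf.field_mask_pb2 import FieldMask
from google.protobuf.struct_pb2 import Struct
from google.protobuf.timestamp_pb2 import Timestamp

# RFC 3339 round-trip via Timestamp.FromJsonString/ToJsonString.
ts = Timestamp()
ts.FromJsonString("1972-01-01T10:00:20.021Z")
assert ts.ToJsonString() == "1972-01-01T10:00:20.021Z"

# Durations serialize with only as many fractional digits as needed.
dur = Duration()
dur.FromTimedelta(datetime.timedelta(seconds=1, milliseconds=10))
assert dur.ToJsonString() == "1.010s"

# Struct behaves like a dict of JSON-compatible values.
st = Struct()
st.update({"addon": "hiero", "enabled": True})
assert st["enabled"] is True

# FieldMask converts between camelCase JSON paths and snake_case field paths.
mask = FieldMask()
mask.FromJsonString("user.displayName,photo")
assert list(mask.paths) == ["user.display_name", "photo"]
```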
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Constants and static functions to support protocol buffer wire format.""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import struct -from google.protobuf import descriptor -from google.protobuf import message - - -TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. -TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 - -# These numbers identify the wire type of a protocol buffer value. -# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded -# tag-and-type to store one of these WIRETYPE_* constants. -# These values must match WireType enum in google/protobuf/wire_format.h. -WIRETYPE_VARINT = 0 -WIRETYPE_FIXED64 = 1 -WIRETYPE_LENGTH_DELIMITED = 2 -WIRETYPE_START_GROUP = 3 -WIRETYPE_END_GROUP = 4 -WIRETYPE_FIXED32 = 5 -_WIRETYPE_MAX = 5 - - -# Bounds for various integer types. -INT32_MAX = int((1 << 31) - 1) -INT32_MIN = int(-(1 << 31)) -UINT32_MAX = (1 << 32) - 1 - -INT64_MAX = (1 << 63) - 1 -INT64_MIN = -(1 << 63) -UINT64_MAX = (1 << 64) - 1 - -# "struct" format strings that will encode/decode the specified formats. -FORMAT_UINT32_LITTLE_ENDIAN = '> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) - - -def ZigZagEncode(value): - """ZigZag Transform: Encodes signed integers so that they can be - effectively used with varint encoding. See wire_format.h for - more details. - """ - if value >= 0: - return value << 1 - return (value << 1) ^ (~0) - - -def ZigZagDecode(value): - """Inverse of ZigZagEncode().""" - if not value & 0x1: - return value >> 1 - return (value >> 1) ^ (~0) - - - -# The *ByteSize() functions below return the number of bytes required to -# serialize "field number + type" information and then serialize the value. - - -def Int32ByteSize(field_number, int32): - return Int64ByteSize(field_number, int32) - - -def Int32ByteSizeNoTag(int32): - return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) - - -def Int64ByteSize(field_number, int64): - # Have to convert to uint before calling UInt64ByteSize(). 
- return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) - - -def UInt32ByteSize(field_number, uint32): - return UInt64ByteSize(field_number, uint32) - - -def UInt64ByteSize(field_number, uint64): - return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) - - -def SInt32ByteSize(field_number, int32): - return UInt32ByteSize(field_number, ZigZagEncode(int32)) - - -def SInt64ByteSize(field_number, int64): - return UInt64ByteSize(field_number, ZigZagEncode(int64)) - - -def Fixed32ByteSize(field_number, fixed32): - return TagByteSize(field_number) + 4 - - -def Fixed64ByteSize(field_number, fixed64): - return TagByteSize(field_number) + 8 - - -def SFixed32ByteSize(field_number, sfixed32): - return TagByteSize(field_number) + 4 - - -def SFixed64ByteSize(field_number, sfixed64): - return TagByteSize(field_number) + 8 - - -def FloatByteSize(field_number, flt): - return TagByteSize(field_number) + 4 - - -def DoubleByteSize(field_number, double): - return TagByteSize(field_number) + 8 - - -def BoolByteSize(field_number, b): - return TagByteSize(field_number) + 1 - - -def EnumByteSize(field_number, enum): - return UInt32ByteSize(field_number, enum) - - -def StringByteSize(field_number, string): - return BytesByteSize(field_number, string.encode('utf-8')) - - -def BytesByteSize(field_number, b): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(len(b)) - + len(b)) - - -def GroupByteSize(field_number, message): - return (2 * TagByteSize(field_number) # START and END group. - + message.ByteSize()) - - -def MessageByteSize(field_number, message): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(message.ByteSize()) - + message.ByteSize()) - - -def MessageSetItemByteSize(field_number, msg): - # First compute the sizes of the tags. - # There are 2 tags for the beginning and ending of the repeated group, that - # is field number 1, one with field number 2 (type_id) and one with field - # number 3 (message). - total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) - - # Add the number of bytes for type_id. - total_size += _VarUInt64ByteSizeNoTag(field_number) - - message_size = msg.ByteSize() - - # The number of bytes for encoding the length of the message. - total_size += _VarUInt64ByteSizeNoTag(message_size) - - # The size of the message. - total_size += message_size - return total_size - - -def TagByteSize(field_number): - """Returns the bytes required to serialize a tag with this field number.""" - # Just pass in type 0, since the type won't affect the tag+type size. - return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) - - -# Private helper function for the *ByteSize() functions above. - -def _VarUInt64ByteSizeNoTag(uint64): - """Returns the number of bytes required to serialize a single varint - using boundary value comparisons. (unrolled loop optimization -WPierce) - uint64 must be unsigned. 
- """ - if uint64 <= 0x7f: return 1 - if uint64 <= 0x3fff: return 2 - if uint64 <= 0x1fffff: return 3 - if uint64 <= 0xfffffff: return 4 - if uint64 <= 0x7ffffffff: return 5 - if uint64 <= 0x3ffffffffff: return 6 - if uint64 <= 0x1ffffffffffff: return 7 - if uint64 <= 0xffffffffffffff: return 8 - if uint64 <= 0x7fffffffffffffff: return 9 - if uint64 > UINT64_MAX: - raise message.EncodeError('Value out of range: %d' % uint64) - return 10 - - -NON_PACKABLE_TYPES = ( - descriptor.FieldDescriptor.TYPE_STRING, - descriptor.FieldDescriptor.TYPE_GROUP, - descriptor.FieldDescriptor.TYPE_MESSAGE, - descriptor.FieldDescriptor.TYPE_BYTES -) - - -def IsTypePackable(field_type): - """Return true iff packable = true is valid for fields of this type. - - Args: - field_type: a FieldDescriptor::Type value. - - Returns: - True iff fields of this type are packable. - """ - return field_type not in NON_PACKABLE_TYPES diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/json_format.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/json_format.py deleted file mode 100644 index 5024ed89d7..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/json_format.py +++ /dev/null @@ -1,912 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in JSON format. - -Simple usage example: - - # Create a proto object and serialize it to a json format string. - message = my_proto_pb2.MyMessage(foo='bar') - json_string = json_format.MessageToJson(message) - - # Parse a json format string to proto object. 
- message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - - -import base64 -from collections import OrderedDict -import json -import math -from operator import methodcaller -import re -import sys - -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import symbol_database - - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, - descriptor.FieldDescriptor.CPPTYPE_UINT32, - descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, - descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) -_INFINITY = 'Infinity' -_NEG_INFINITY = '-Infinity' -_NAN = 'NaN' - -_UNPAIRED_SURROGATE_PATTERN = re.compile( - u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: - raise ParseError('Message too deep. Max recursion depth is {0}'.format( - self.max_recursion_depth)) - message_descriptor = message.DESCRIPTOR - full_name = message_descriptor.full_name - if not path: - path = message_descriptor.name - if _IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value, message, path) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) - else: - self._ConvertFieldValuePair(value, message, path) - self.recursion_depth -= 1 - - def _ConvertFieldValuePair(self, js, message, path): - """Convert field value pairs into regular message. - - Args: - js: A JSON object to convert the field value pairs. - message: A regular protocol message to record the data. - path: parent path to log parse error info. - - Raises: - ParseError: In case of problems converting. - """ - names = [] - message_descriptor = message.DESCRIPTOR - fields_by_json_name = dict((f.json_name, f) - for f in message_descriptor.fields) - for name in js: - try: - field = fields_by_json_name.get(name, None) - if not field: - field = message_descriptor.fields_by_name.get(name, None) - if not field and _VALID_EXTENSION_NAME.match(name): - if not message_descriptor.is_extendable: - raise ParseError( - 'Message type {0} does not have extensions at {1}'.format( - message_descriptor.full_name, path)) - identifier = name[1:-1] # strip [] brackets - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - # Try looking for extension by the message type name, dropping the - # field name following the final . separator in full_name. - identifier = '.'.join(identifier.split('.')[:-1]) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - if self.ignore_unknown_fields: - continue - raise ParseError( - ('Message type "{0}" has no field named "{1}" at "{2}".\n' - ' Available Fields(except extensions): "{3}"').format( - message_descriptor.full_name, name, path, - [f.json_name for f in message_descriptor.fields])) - if name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" fields at "{2}".'.format( - message.DESCRIPTOR.full_name, name, path)) - names.append(name) - value = js[name] - # Check no other oneof field is parsed. 
- if field.containing_oneof is not None and value is not None: - oneof_name = field.containing_oneof.name - if oneof_name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" oneof fields at "{2}".'.format( - message.DESCRIPTOR.full_name, oneof_name, - path)) - names.append(oneof_name) - - if value is None: - if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.message_type.full_name == 'google.protobuf.Value'): - sub_message = getattr(message, field.name) - sub_message.null_value = 0 - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM - and field.enum_type.full_name == 'google.protobuf.NullValue'): - setattr(message, field.name, 0) - else: - message.ClearField(field.name) - continue - - # Parse field value. - if _IsMapEntry(field): - message.ClearField(field.name) - self._ConvertMapFieldValue(value, message, field, - '{0}.{1}'.format(path, name)) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - message.ClearField(field.name) - if not isinstance(value, list): - raise ParseError('repeated field {0} must be in [] which is ' - '{1} at {2}'.format(name, value, path)) - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - # Repeated message field. - for index, item in enumerate(value): - sub_message = getattr(message, field.name).add() - # None is a null_value in Value. - if (item is None and - sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - self.ConvertMessage(item, sub_message, - '{0}.{1}[{2}]'.format(path, name, index)) - else: - # Repeated scalar field. - for index, item in enumerate(value): - if item is None: - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - getattr(message, field.name).append( - _ConvertScalarFieldValue( - item, field, '{0}.{1}[{2}]'.format(path, name, index))) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - sub_message = message.Extensions[field] - else: - sub_message = getattr(message, field.name) - sub_message.SetInParent() - self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) - else: - if field.is_extension: - message.Extensions[field] = _ConvertScalarFieldValue( - value, field, '{0}.{1}'.format(path, name)) - else: - setattr( - message, field.name, - _ConvertScalarFieldValue(value, field, - '{0}.{1}'.format(path, name))) - except ParseError as e: - if field and field.containing_oneof is None: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - else: - raise ParseError(str(e)) - except ValueError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - except TypeError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - - def _ConvertAnyMessage(self, value, message, path): - """Convert a JSON representation into Any message.""" - if isinstance(value, dict) and not value: - return - try: - type_url = value['@type'] - except KeyError: - raise ParseError( - '@type is missing when parsing any message at {0}'.format(path)) - - try: - sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) - except TypeError as e: - raise ParseError('{0} at {1}'.format(e, path)) - message_descriptor = sub_message.DESCRIPTOR - full_name = message_descriptor.full_name - if 
_IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value['value'], sub_message, - '{0}.value'.format(path)) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, - '{0}.value'.format(path))( - self) - else: - del value['@type'] - self._ConvertFieldValuePair(value, sub_message, path) - value['@type'] = type_url - # Sets Any message - message.value = sub_message.SerializeToString() - message.type_url = type_url - - def _ConvertGenericMessage(self, value, message, path): - """Convert a JSON representation into message with FromJsonString.""" - # Duration, Timestamp, FieldMask have a FromJsonString method to do the - # conversion. Users can also call the method directly. - try: - message.FromJsonString(value) - except ValueError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - def _ConvertValueMessage(self, value, message, path): - """Convert a JSON representation into Value message.""" - if isinstance(value, dict): - self._ConvertStructMessage(value, message.struct_value, path) - elif isinstance(value, list): - self._ConvertListValueMessage(value, message.list_value, path) - elif value is None: - message.null_value = 0 - elif isinstance(value, bool): - message.bool_value = value - elif isinstance(value, str): - message.string_value = value - elif isinstance(value, _INT_OR_FLOAT): - message.number_value = value - else: - raise ParseError('Value {0} has unexpected type {1} at {2}'.format( - value, type(value), path)) - - def _ConvertListValueMessage(self, value, message, path): - """Convert a JSON representation into ListValue message.""" - if not isinstance(value, list): - raise ParseError('ListValue must be in [] which is {0} at {1}'.format( - value, path)) - message.ClearField('values') - for index, item in enumerate(value): - self._ConvertValueMessage(item, message.values.add(), - '{0}[{1}]'.format(path, index)) - - def _ConvertStructMessage(self, value, message, path): - """Convert a JSON representation into Struct message.""" - if not isinstance(value, dict): - raise ParseError('Struct must be in a dict which is {0} at {1}'.format( - value, path)) - # Clear will mark the struct as modified so it will be created even if - # there are no values. - message.Clear() - for key in value: - self._ConvertValueMessage(value[key], message.fields[key], - '{0}.{1}'.format(path, key)) - return - - def _ConvertWrapperMessage(self, value, message, path): - """Convert a JSON representation into Wrapper message.""" - field = message.DESCRIPTOR.fields_by_name['value'] - setattr( - message, 'value', - _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) - - def _ConvertMapFieldValue(self, value, message, field, path): - """Convert map field value for a message map field. - - Args: - value: A JSON object to convert the map field value. - message: A protocol message to record the converted data. - field: The descriptor of the map field to be converted. - path: parent path to log parse error info. - - Raises: - ParseError: In case of convert problems. 
- """ - if not isinstance(value, dict): - raise ParseError( - 'Map field {0} must be in a dict which is {1} at {2}'.format( - field.name, value, path)) - key_field = field.message_type.fields_by_name['key'] - value_field = field.message_type.fields_by_name['value'] - for key in value: - key_value = _ConvertScalarFieldValue(key, key_field, - '{0}.key'.format(path), True) - if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self.ConvertMessage(value[key], - getattr(message, field.name)[key_value], - '{0}[{1}]'.format(path, key_value)) - else: - getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( - value[key], value_field, path='{0}[{1}]'.format(path, key_value)) - - -def _ConvertScalarFieldValue(value, field, path, require_str=False): - """Convert a single scalar field value. - - Args: - value: A scalar value to convert the scalar field value. - field: The descriptor of the field to convert. - path: parent path to log parse error info. - require_str: If True, the field value must be a str. - - Returns: - The converted scalar field value - - Raises: - ParseError: In case of convert problems. - """ - try: - if field.cpp_type in _INT_TYPES: - return _ConvertInteger(value) - elif field.cpp_type in _FLOAT_TYPES: - return _ConvertFloat(value, field) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - return _ConvertBool(value, require_str) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - if isinstance(value, str): - encoded = value.encode('utf-8') - else: - encoded = value - # Add extra padding '=' - padded_value = encoded + b'=' * (4 - len(encoded) % 4) - return base64.urlsafe_b64decode(padded_value) - else: - # Checking for unpaired surrogates appears to be unreliable, - # depending on the specific Python version, so we check manually. - if _UNPAIRED_SURROGATE_PATTERN.search(value): - raise ParseError('Unpaired surrogate') - return value - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - # Convert an enum value. - enum_value = field.enum_type.values_by_name.get(value, None) - if enum_value is None: - try: - number = int(value) - enum_value = field.enum_type.values_by_number.get(number, None) - except ValueError: - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - if enum_value is None: - if field.file.syntax == 'proto3': - # Proto3 accepts unknown enums. - return number - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - return enum_value.number - except ParseError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - -def _ConvertInteger(value): - """Convert an integer. - - Args: - value: A scalar value to convert. - - Returns: - The integer value. - - Raises: - ParseError: If an integer couldn't be consumed. 
- """ - if isinstance(value, float) and not value.is_integer(): - raise ParseError('Couldn\'t parse integer: {0}'.format(value)) - - if isinstance(value, str) and value.find(' ') != -1: - raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) - - if isinstance(value, bool): - raise ParseError('Bool value {0} is not acceptable for ' - 'integer field'.format(value)) - - return int(value) - - -def _ConvertFloat(value, field): - """Convert an floating point number.""" - if isinstance(value, float): - if math.isnan(value): - raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') - if math.isinf(value): - if value > 0: - raise ParseError('Couldn\'t parse Infinity or value too large, ' - 'use quoted "Infinity" instead') - else: - raise ParseError('Couldn\'t parse -Infinity or value too small, ' - 'use quoted "-Infinity" instead') - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - # pylint: disable=protected-access - if value > type_checkers._FLOAT_MAX: - raise ParseError('Float value too large') - # pylint: disable=protected-access - if value < type_checkers._FLOAT_MIN: - raise ParseError('Float value too small') - if value == 'nan': - raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') - try: - # Assume Python compatible syntax. - return float(value) - except ValueError: - # Check alternative spellings. - if value == _NEG_INFINITY: - return float('-inf') - elif value == _INFINITY: - return float('inf') - elif value == _NAN: - return float('nan') - else: - raise ParseError('Couldn\'t parse float: {0}'.format(value)) - - -def _ConvertBool(value, require_str): - """Convert a boolean value. - - Args: - value: A scalar value to convert. - require_str: If True, value must be a str. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - if require_str: - if value == 'true': - return True - elif value == 'false': - return False - else: - raise ParseError('Expected "true" or "false", not {0}'.format(value)) - - if not isinstance(value, bool): - raise ParseError('Expected true or false without quotes') - return value - -_WKTJSONMETHODS = { - 'google.protobuf.Any': ['_AnyMessageToJsonObject', - '_ConvertAnyMessage'], - 'google.protobuf.Duration': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', - '_ConvertListValueMessage'], - 'google.protobuf.Struct': ['_StructMessageToJsonObject', - '_ConvertStructMessage'], - 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.Value': ['_ValueMessageToJsonObject', - '_ConvertValueMessage'] -} diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message.py deleted file mode 100644 index 76c6802f70..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message.py +++ /dev/null @@ -1,424 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# TODO(robinson): We should just make these methods all "pure-virtual" and move -# all implementation out, into reflection.py for now. - - -"""Contains an abstract base class for protocol messages.""" - -__author__ = 'robinson@google.com (Will Robinson)' - -class Error(Exception): - """Base error type for this module.""" - pass - - -class DecodeError(Error): - """Exception raised when deserializing messages.""" - pass - - -class EncodeError(Error): - """Exception raised when serializing messages.""" - pass - - -class Message(object): - - """Abstract base class for protocol messages. - - Protocol message classes are almost always generated by the protocol - compiler. These generated types subclass Message and implement the methods - shown below. - """ - - # TODO(robinson): Link to an HTML document here. - - # TODO(robinson): Document that instances of this class will also - # have an Extensions attribute with __getitem__ and __setitem__. - # Again, not sure how to best convey this. - - # TODO(robinson): Document that the class must also have a static - # RegisterExtension(extension_field) method. - # Not sure how to best express at this point. - - # TODO(robinson): Document these fields and methods. - - __slots__ = [] - - #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. - DESCRIPTOR = None - - def __deepcopy__(self, memo=None): - clone = type(self)() - clone.MergeFrom(self) - return clone - - def __eq__(self, other_msg): - """Recursively compares two messages by value and structure.""" - raise NotImplementedError - - def __ne__(self, other_msg): - # Can't just say self != other_msg, since that would infinitely recurse. :) - return not self == other_msg - - def __hash__(self): - raise TypeError('unhashable object') - - def __str__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def __unicode__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def MergeFrom(self, other_msg): - """Merges the contents of the specified message into current message. - - This method merges the contents of the specified message into the current - message. Singular fields that are set in the specified message overwrite - the corresponding fields in the current message. Repeated fields are - appended. 
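The `MergeFrom` contract described above (singular fields overwrite, repeated fields append, sub-messages merge recursively) can be sketched with the well-known `Struct`/`ListValue` types standing in for generated messages; the field values are illustrative:

```python
# Sketch of the MergeFrom semantics documented in the removed Message ABC.
from google.protobuf import struct_pb2

a = struct_pb2.ListValue()
a.values.add().number_value = 1

b = struct_pb2.ListValue()
b.values.add().number_value = 2

a.MergeFrom(b)              # repeated fields are appended
assert len(a.values) == 2

c = struct_pb2.Struct()
c.fields["x"].number_value = 1
d = struct_pb2.Struct()
d.MergeFrom(c)              # map/sub-message content is merged recursively
assert d.fields["x"].number_value == 1
```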
Singular sub-messages and groups are recursively merged. - - Args: - other_msg (Message): A message to merge into the current message. - """ - raise NotImplementedError - - def CopyFrom(self, other_msg): - """Copies the content of the specified message into the current message. - - The method clears the current message and then merges the specified - message using MergeFrom. - - Args: - other_msg (Message): A message to copy into the current one. - """ - if self is other_msg: - return - self.Clear() - self.MergeFrom(other_msg) - - def Clear(self): - """Clears all data that was set in the message.""" - raise NotImplementedError - - def SetInParent(self): - """Mark this as present in the parent. - - This normally happens automatically when you assign a field of a - sub-message, but sometimes you want to make the sub-message - present while keeping it empty. If you find yourself using this, - you may want to reconsider your design. - """ - raise NotImplementedError - - def IsInitialized(self): - """Checks if the message is initialized. - - Returns: - bool: The method returns True if the message is initialized (i.e. all of - its required fields are set). - """ - raise NotImplementedError - - # TODO(robinson): MergeFromString() should probably return None and be - # implemented in terms of a helper that returns the # of bytes read. Our - # deserialization routines would use the helper when recursively - # deserializing, but the end user would almost always just want the no-return - # MergeFromString(). - - def MergeFromString(self, serialized): - """Merges serialized protocol buffer data into this message. - - When we find a field in `serialized` that is already present - in this message: - - - If it's a "repeated" field, we append to the end of our list. - - Else, if it's a scalar, we overwrite our field. - - Else, (it's a nonrepeated composite), we recursively merge - into the existing composite. - - Args: - serialized (bytes): Any object that allows us to call - ``memoryview(serialized)`` to access a string of bytes using the - buffer interface. - - Returns: - int: The number of bytes read from `serialized`. - For non-group messages, this will always be `len(serialized)`, - but for messages which are actually groups, this will - generally be less than `len(serialized)`, since we must - stop when we reach an ``END_GROUP`` tag. Note that if - we *do* stop because of an ``END_GROUP`` tag, the number - of bytes returned does not include the bytes - for the ``END_GROUP`` tag information. - - Raises: - DecodeError: if the input cannot be parsed. - """ - # TODO(robinson): Document handling of unknown fields. - # TODO(robinson): When we switch to a helper, this will return None. - raise NotImplementedError - - def ParseFromString(self, serialized): - """Parse serialized protocol buffer data into this message. - - Like :func:`MergeFromString()`, except we clear the object first. - - Raises: - message.DecodeError if the input cannot be parsed. - """ - self.Clear() - return self.MergeFromString(serialized) - - def SerializeToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - A binary string representation of the message if all of the required - fields in the message are set (i.e. the message is initialized). - - Raises: - EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
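The serialize/parse methods documented above form a simple round trip; a short sketch with a well-known message type (timestamps chosen arbitrarily):

```python
# MergeFromString returns the number of bytes consumed; for non-group
# messages that is the full payload length.
from google.protobuf import timestamp_pb2

original = timestamp_pb2.Timestamp(seconds=1700000000, nanos=500)
payload = original.SerializeToString(deterministic=True)

restored = timestamp_pb2.Timestamp()
bytes_read = restored.MergeFromString(payload)

assert bytes_read == len(payload)   # everything was consumed
assert restored == original         # __eq__ compares by value
```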
- """ - raise NotImplementedError - - def SerializePartialToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - This method is similar to SerializeToString but doesn't check if the - message is initialized. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - bytes: A serialized representation of the partial message. - """ - raise NotImplementedError - - # TODO(robinson): Decide whether we like these better - # than auto-generated has_foo() and clear_foo() methods - # on the instances themselves. This way is less consistent - # with C++, but it makes reflection-type access easier and - # reduces the number of magically autogenerated things. - # - # TODO(robinson): Be sure to document (and test) exactly - # which field names are accepted here. Are we case-sensitive? - # What do we do with fields that share names with Python keywords - # like 'lambda' and 'yield'? - # - # nnorwitz says: - # """ - # Typically (in python), an underscore is appended to names that are - # keywords. So they would become lambda_ or yield_. - # """ - def ListFields(self): - """Returns a list of (FieldDescriptor, value) tuples for present fields. - - A message field is non-empty if HasField() would return true. A singular - primitive field is non-empty if HasField() would return true in proto2 or it - is non zero in proto3. A repeated field is non-empty if it contains at least - one element. The fields are ordered by field number. - - Returns: - list[tuple(FieldDescriptor, value)]: field descriptors and values - for all fields in the message which are not empty. The values vary by - field type. - """ - raise NotImplementedError - - def HasField(self, field_name): - """Checks if a certain field is set for the message. - - For a oneof group, checks if any field inside is set. Note that if the - field_name is not defined in the message descriptor, :exc:`ValueError` will - be raised. - - Args: - field_name (str): The name of the field to check for presence. - - Returns: - bool: Whether a value has been set for the named field. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def ClearField(self, field_name): - """Clears the contents of a given field. - - Inside a oneof group, clears the field set. If the name neither refers to a - defined field or oneof group, :exc:`ValueError` is raised. - - Args: - field_name (str): The name of the field to check for presence. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def WhichOneof(self, oneof_group): - """Returns the name of the field that is set inside a oneof group. - - If no field is set, returns None. - - Args: - oneof_group (str): the name of the oneof group to check. - - Returns: - str or None: The name of the group that is set, or None. - - Raises: - ValueError: no group with the given name exists - """ - raise NotImplementedError - - def HasExtension(self, extension_handle): - """Checks if a certain extension is present for this message. - - Extensions are retrieved using the :attr:`Extensions` mapping (if present). - - Args: - extension_handle: The handle for the extension to check. - - Returns: - bool: Whether the extension is present for this message. - - Raises: - KeyError: if the extension is repeated. 
Similar to repeated fields, - there is no separate notion of presence: a "not present" repeated - extension is an empty list. - """ - raise NotImplementedError - - def ClearExtension(self, extension_handle): - """Clears the contents of a given extension. - - Args: - extension_handle: The handle for the extension to clear. - """ - raise NotImplementedError - - def UnknownFields(self): - """Returns the UnknownFieldSet. - - Returns: - UnknownFieldSet: The unknown fields stored in this message. - """ - raise NotImplementedError - - def DiscardUnknownFields(self): - """Clears all fields in the :class:`UnknownFieldSet`. - - This operation is recursive for nested message. - """ - raise NotImplementedError - - def ByteSize(self): - """Returns the serialized size of this message. - - Recursively calls ByteSize() on all contained messages. - - Returns: - int: The number of bytes required to serialize this message. - """ - raise NotImplementedError - - @classmethod - def FromString(cls, s): - raise NotImplementedError - - @staticmethod - def RegisterExtension(extension_handle): - raise NotImplementedError - - def _SetListener(self, message_listener): - """Internal method used by the protocol message implementation. - Clients should not call this directly. - - Sets a listener that this message will call on certain state transitions. - - The purpose of this method is to register back-edges from children to - parents at runtime, for the purpose of setting "has" bits and - byte-size-dirty bits in the parent and ancestor objects whenever a child or - descendant object is modified. - - If the client wants to disconnect this Message from the object tree, she - explicitly sets callback to None. - - If message_listener is None, unregisters any existing listener. Otherwise, - message_listener must implement the MessageListener interface in - internal/message_listener.py, and we discard any listener registered - via a previous _SetListener() call. - """ - raise NotImplementedError - - def __getstate__(self): - """Support the pickle protocol.""" - return dict(serialized=self.SerializePartialToString()) - - def __setstate__(self, state): - """Support the pickle protocol.""" - self.__init__() - serialized = state['serialized'] - # On Python 3, using encoding='latin1' is required for unpickling - # protos pickled by Python 2. - if not isinstance(serialized, bytes): - serialized = serialized.encode('latin1') - self.ParseFromString(serialized) - - def __reduce__(self): - message_descriptor = self.DESCRIPTOR - if message_descriptor.containing_type is None: - return type(self), (), self.__getstate__() - # the message type must be nested. - # Python does not pickle nested classes; use the symbol_database on the - # receiving end. - container = message_descriptor - return (_InternalConstructMessage, (container.full_name,), - self.__getstate__()) - - -def _InternalConstructMessage(full_name): - """Constructs a nested message.""" - from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top - - return symbol_database.Default().GetSymbol(full_name)() diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message_factory.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message_factory.py deleted file mode 100644 index 3656fa6874..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/message_factory.py +++ /dev/null @@ -1,185 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
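The `__getstate__`/`__setstate__`/`__reduce__` hooks shown above are what make generated messages picklable; a quick round trip as a sketch:

```python
# Pickling serializes the partial message and re-parses it on load.
import pickle

from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp(seconds=42)
clone = pickle.loads(pickle.dumps(ts))
assert clone == ts
```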
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides a factory class for generating dynamic messages. - -The easiest way to use this class is if you have access to the FileDescriptor -protos containing the messages you want to create you can just do the following: - -message_classes = message_factory.GetMessages(iterable_of_file_descriptors) -my_proto_instance = message_classes['some.proto.package.MessageName']() -""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -from google.protobuf.internal import api_implementation -from google.protobuf import descriptor_pool -from google.protobuf import message - -if api_implementation.Type() == 'cpp': - from google.protobuf.pyext import cpp_message as message_impl -else: - from google.protobuf.internal import python_message as message_impl - - -# The type of all Message classes. -_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType - - -class MessageFactory(object): - """Factory for creating Proto2 messages from descriptors in a pool.""" - - def __init__(self, pool=None): - """Initializes a new factory.""" - self.pool = pool or descriptor_pool.DescriptorPool() - - # local cache of all classes built from protobuf descriptors - self._classes = {} - - def GetPrototype(self, descriptor): - """Obtains a proto2 message class based on the passed in descriptor. - - Passing a descriptor with a fully qualified name matching a previous - invocation will cause the same class to be returned. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - if descriptor not in self._classes: - result_class = self.CreatePrototype(descriptor) - # The assignment to _classes is redundant for the base implementation, but - # might avoid confusion in cases where CreatePrototype gets overridden and - # does not call the base implementation. 
- self._classes[descriptor] = result_class - return result_class - return self._classes[descriptor] - - def CreatePrototype(self, descriptor): - """Builds a proto2 message class based on the passed in descriptor. - - Don't call this function directly, it always creates a new class. Call - GetPrototype() instead. This method is meant to be overridden in subblasses - to perform additional operations on the newly constructed class. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - descriptor_name = descriptor.name - result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( - descriptor_name, - (message.Message,), - { - 'DESCRIPTOR': descriptor, - # If module not set, it wrongly points to message_factory module. - '__module__': None, - }) - result_class._FACTORY = self # pylint: disable=protected-access - # Assign in _classes before doing recursive calls to avoid infinite - # recursion. - self._classes[descriptor] = result_class - for field in descriptor.fields: - if field.message_type: - self.GetPrototype(field.message_type) - for extension in result_class.DESCRIPTOR.extensions: - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result_class - - def GetMessages(self, files): - """Gets all the messages from a specified file. - - This will find and resolve dependencies, failing if the descriptor - pool cannot satisfy them. - - Args: - files: The file names to extract messages from. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - result = {} - for file_name in files: - file_desc = self.pool.FindFileByName(file_name) - for desc in file_desc.message_types_by_name.values(): - result[desc.full_name] = self.GetPrototype(desc) - - # While the extension FieldDescriptors are created by the descriptor pool, - # the python classes created in the factory need them to be registered - # explicitly, which is done below. - # - # The call to RegisterExtension will specifically check if the - # extension was already registered on the object and either - # ignore the registration if the original was the same, or raise - # an error if they were different. - - for extension in file_desc.extensions_by_name.values(): - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result - - -_FACTORY = MessageFactory() - - -def GetMessages(file_protos): - """Builds a dictionary of all the messages available in a set of files. - - Args: - file_protos: Iterable of FileDescriptorProto to build messages out of. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - # The cpp implementation of the protocol buffer library requires to add the - # message in topological order of the dependency graph. - file_by_name = {file_proto.name: file_proto for file_proto in file_protos} - def _AddFile(file_proto): - for dependency in file_proto.dependency: - if dependency in file_by_name: - # Remove from elements to be visited, in order to cut cycles. 
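The module-level `GetMessages` removed above builds message classes directly from `FileDescriptorProto`s. A minimal sketch of that usage; the proto file name, package, and field definitions here are invented for illustration:

```python
# Build a dynamic message class from a hand-written FileDescriptorProto.
from google.protobuf import descriptor_pb2
from google.protobuf import message_factory

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = "example/dynamic.proto"
file_proto.package = "example"

msg_proto = file_proto.message_type.add()
msg_proto.name = "Ping"

field = msg_proto.field.add()
field.name = "count"
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

classes = message_factory.GetMessages([file_proto])
Ping = classes["example.Ping"]
msg = Ping(count=3)
```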
- _AddFile(file_by_name.pop(dependency)) - _FACTORY.pool.Add(file_proto) - while file_by_name: - _AddFile(file_by_name.popitem()[1]) - return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/proto_builder.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/proto_builder.py deleted file mode 100644 index a4667ce63e..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/proto_builder.py +++ /dev/null @@ -1,134 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Dynamic Protobuf class creator.""" - -from collections import OrderedDict -import hashlib -import os - -from google.protobuf import descriptor_pb2 -from google.protobuf import descriptor -from google.protobuf import message_factory - - -def _GetMessageFromFactory(factory, full_name): - """Get a proto class from the MessageFactory by name. - - Args: - factory: a MessageFactory instance. - full_name: str, the fully qualified name of the proto type. - Returns: - A class, for the type identified by full_name. - Raises: - KeyError, if the proto is not found in the factory's descriptor pool. - """ - proto_descriptor = factory.pool.FindMessageTypeByName(full_name) - proto_cls = factory.GetPrototype(proto_descriptor) - return proto_cls - - -def MakeSimpleProtoClass(fields, full_name=None, pool=None): - """Create a Protobuf class whose fields are basic types. - - Note: this doesn't validate field names! - - Args: - fields: dict of {name: field_type} mappings for each field in the proto. If - this is an OrderedDict the order will be maintained, otherwise the - fields will be sorted by name. - full_name: optional str, the fully-qualified name of the proto type. - pool: optional DescriptorPool instance. - Returns: - a class, the new protobuf class with a FileDescriptor. 
- """ - factory = message_factory.MessageFactory(pool=pool) - - if full_name is not None: - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # Get a list of (name, field_type) tuples from the fields dict. If fields was - # an OrderedDict we keep the order, but otherwise we sort the field to ensure - # consistent ordering. - field_items = fields.items() - if not isinstance(fields, OrderedDict): - field_items = sorted(field_items) - - # Use a consistent file name that is unlikely to conflict with any imported - # proto files. - fields_hash = hashlib.sha1() - for f_name, f_type in field_items: - fields_hash.update(f_name.encode('utf-8')) - fields_hash.update(str(f_type).encode('utf-8')) - proto_file_name = fields_hash.hexdigest() + '.proto' - - # If the proto is anonymous, use the same hash to name it. - if full_name is None: - full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + - fields_hash.hexdigest()) - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # This is the first time we see this proto: add a new descriptor to the pool. - factory.pool.Add( - _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) - return _GetMessageFromFactory(factory, full_name) - - -def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): - """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" - package, name = full_name.rsplit('.', 1) - file_proto = descriptor_pb2.FileDescriptorProto() - file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) - file_proto.package = package - desc_proto = file_proto.message_type.add() - desc_proto.name = name - for f_number, (f_name, f_type) in enumerate(field_items, 1): - field_proto = desc_proto.field.add() - field_proto.name = f_name - # # If the number falls in the reserved range, reassign it to the correct - # # number after the range. - if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: - f_number += ( - descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - - descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) - field_proto.number = f_number - field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL - field_proto.type = f_type - return file_proto diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/__init__.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/cpp_message.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/cpp_message.py deleted file mode 100644 index fc8eb32d79..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/cpp_message.py +++ /dev/null @@ -1,65 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Protocol message implementation hooks for C++ implementation. - -Contains helper functions used to create protocol message classes from -Descriptor objects at runtime backed by the protocol buffer C++ API. -""" - -__author__ = 'tibell@google.com (Johan Tibell)' - -from google.protobuf.pyext import _message - - -class GeneratedProtocolMessageType(_message.MessageMeta): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - - The above example will not work for nested types. If you wish to include them, - use reflection.MakeClass() instead of manually instantiating the class in - order to create the appropriate class structure. - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/python_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/python_pb2.py deleted file mode 100644 index 2c6ecf4c98..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/pyext/python_pb2.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/pyext/python.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestAllExtensions.RegisterExtension(optional_nested_message_extension) - TestAllExtensions.RegisterExtension(repeated_nested_message_extension) - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'H\001' - _TESTALLTYPES._serialized_start=72 - _TESTALLTYPES._serialized_end=388 - _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 - _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 - _FOREIGNMESSAGE._serialized_start=390 - _FOREIGNMESSAGE._serialized_end=428 - _TESTALLEXTENSIONS._serialized_start=430 - _TESTALLEXTENSIONS._serialized_end=459 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/reflection.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/reflection.py deleted file mode 100644 index 81e18859a8..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/reflection.py +++ /dev/null @@ -1,95 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. -(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -from google.protobuf import message_factory -from google.protobuf import symbol_database - -# The type of all Message classes. -# Part of the public interface, but normally only used by message factories. -GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE - -MESSAGE_CLASS_CACHE = {} - - -# Deprecated. Please NEVER use reflection.ParseMessage(). -def ParseMessage(descriptor, byte_str): - """Generate a new Message instance from this Descriptor and a byte string. - - DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). - Please use MessageFactory.GetPrototype() instead. - - Args: - descriptor: Protobuf Descriptor object - byte_str: Serialized protocol buffer byte string - - Returns: - Newly created protobuf Message object. - """ - result_class = MakeClass(descriptor) - new_msg = result_class() - new_msg.ParseFromString(byte_str) - return new_msg - - -# Deprecated. Please NEVER use reflection.MakeClass(). -def MakeClass(descriptor): - """Construct a class object for a protobuf described by descriptor. - - DEPRECATED: use MessageFactory.GetPrototype() instead. - - Args: - descriptor: A descriptor.Descriptor object describing the protobuf. - Returns: - The Message class object described by the descriptor. - """ - # Original implementation leads to duplicate message classes, which won't play - # well with extensions. Message factory info is also missing. - # Redirect to message_factory. - return symbol_database.Default().GetPrototype(descriptor) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service.py deleted file mode 100644 index 5625246324..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service.py +++ /dev/null @@ -1,228 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
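The deprecation notes in the removed `reflection.py` point callers at `MessageFactory.GetPrototype()`; through the default symbol database that looks roughly like the following sketch (the timestamp type is just a convenient stand-in):

```python
# Obtain a message class from its descriptor instead of reflection.MakeClass().
from google.protobuf import symbol_database
from google.protobuf import timestamp_pb2

db = symbol_database.Default()
TimestampClass = db.GetPrototype(timestamp_pb2.Timestamp.DESCRIPTOR)

msg = TimestampClass(seconds=5)
payload = msg.SerializeToString()
```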
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""DEPRECATED: Declares the RPC service interfaces. - -This module declares the abstract interfaces underlying proto2 RPC -services. These are intended to be independent of any particular RPC -implementation, so that proto2 services can be used on top of a variety -of implementations. Starting with version 2.3.0, RPC implementations should -not try to build on these, but should instead provide code generator plugins -which generate code specific to the particular RPC implementation. This way -the generated code can be more appropriate for the implementation in use -and can avoid unnecessary layers of indirection. -""" - -__author__ = 'petar@google.com (Petar Petrov)' - - -class RpcException(Exception): - """Exception raised on failed blocking RPC method call.""" - pass - - -class Service(object): - - """Abstract base interface for protocol-buffer-based RPC services. - - Services themselves are abstract classes (implemented either by servers or as - stubs), but they subclass this base interface. The methods of this - interface can be used to call the methods of the service without knowing - its exact type at compile time (analogous to the Message interface). - """ - - def GetDescriptor(): - """Retrieves this service's descriptor.""" - raise NotImplementedError - - def CallMethod(self, method_descriptor, rpc_controller, - request, done): - """Calls a method of the service specified by method_descriptor. - - If "done" is None then the call is blocking and the response - message will be returned directly. Otherwise the call is asynchronous - and "done" will later be called with the response value. - - In the blocking case, RpcException will be raised on error. - - Preconditions: - - * method_descriptor.service == GetDescriptor - * request is of the exact same classes as returned by - GetRequestClass(method). - * After the call has started, the request must not be modified. - * "rpc_controller" is of the correct type for the RPC implementation being - used by this Service. 
For stubs, the "correct type" depends on the - RpcChannel which the stub is using. - - Postconditions: - - * "done" will be called when the method is complete. This may be - before CallMethod() returns or it may be at some point in the future. - * If the RPC failed, the response value passed to "done" will be None. - Further details about the failure can be found by querying the - RpcController. - """ - raise NotImplementedError - - def GetRequestClass(self, method_descriptor): - """Returns the class of the request message for the specified method. - - CallMethod() requires that the request is of a particular subclass of - Message. GetRequestClass() gets the default instance of this required - type. - - Example: - method = service.GetDescriptor().FindMethodByName("Foo") - request = stub.GetRequestClass(method)() - request.ParseFromString(input) - service.CallMethod(method, request, callback) - """ - raise NotImplementedError - - def GetResponseClass(self, method_descriptor): - """Returns the class of the response message for the specified method. - - This method isn't really needed, as the RpcChannel's CallMethod constructs - the response protocol message. It's provided anyway in case it is useful - for the caller to know the response type in advance. - """ - raise NotImplementedError - - -class RpcController(object): - - """An RpcController mediates a single method call. - - The primary purpose of the controller is to provide a way to manipulate - settings specific to the RPC implementation and to find out about RPC-level - errors. The methods provided by the RpcController interface are intended - to be a "least common denominator" set of features which we expect all - implementations to support. Specific implementations may provide more - advanced features (e.g. deadline propagation). - """ - - # Client-side methods below - - def Reset(self): - """Resets the RpcController to its initial state. - - After the RpcController has been reset, it may be reused in - a new call. Must not be called while an RPC is in progress. - """ - raise NotImplementedError - - def Failed(self): - """Returns true if the call failed. - - After a call has finished, returns true if the call failed. The possible - reasons for failure depend on the RPC implementation. Failed() must not - be called before a call has finished. If Failed() returns true, the - contents of the response message are undefined. - """ - raise NotImplementedError - - def ErrorText(self): - """If Failed is true, returns a human-readable description of the error.""" - raise NotImplementedError - - def StartCancel(self): - """Initiate cancellation. - - Advises the RPC system that the caller desires that the RPC call be - canceled. The RPC system may cancel it immediately, may wait awhile and - then cancel it, or may not even cancel the call at all. If the call is - canceled, the "done" callback will still be called and the RpcController - will indicate that the call failed at that time. - """ - raise NotImplementedError - - # Server-side methods below - - def SetFailed(self, reason): - """Sets a failure reason. - - Causes Failed() to return true on the client side. "reason" will be - incorporated into the message returned by ErrorText(). If you find - you need to return machine-readable information about failures, you - should incorporate it into your response protocol buffer and should - NOT call SetFailed(). - """ - raise NotImplementedError - - def IsCanceled(self): - """Checks if the client cancelled the RPC. 
- - If true, indicates that the client canceled the RPC, so the server may - as well give up on replying to it. The server should still call the - final "done" callback. - """ - raise NotImplementedError - - def NotifyOnCancel(self, callback): - """Sets a callback to invoke on cancel. - - Asks that the given callback be called when the RPC is canceled. The - callback will always be called exactly once. If the RPC completes without - being canceled, the callback will be called after completion. If the RPC - has already been canceled when NotifyOnCancel() is called, the callback - will be called immediately. - - NotifyOnCancel() must be called no more than once per request. - """ - raise NotImplementedError - - -class RpcChannel(object): - - """Abstract interface for an RPC channel. - - An RpcChannel represents a communication line to a service which can be used - to call that service's methods. The service may be running on another - machine. Normally, you should not use an RpcChannel directly, but instead - construct a stub {@link Service} wrapping it. Example: - - Example: - RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") - RpcController controller = rpcImpl.Controller() - MyService service = MyService_Stub(channel) - service.MyMethod(controller, request, callback) - """ - - def CallMethod(self, method_descriptor, rpc_controller, - request, response_class, done): - """Calls the method identified by the descriptor. - - Call the given method of the remote service. The signature of this - procedure looks the same as Service.CallMethod(), but the requirements - are less strict in one important way: the request object doesn't have to - be of any specific class as long as its descriptor is method.input_type. - """ - raise NotImplementedError diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service_reflection.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service_reflection.py deleted file mode 100644 index f82ab7145a..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/service_reflection.py +++ /dev/null @@ -1,295 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
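Generated stubs (built by the service reflection machinery whose removal follows) only ever talk to an `RpcChannel`. Purely as a sketch, and with the class name and wiring assumed rather than taken from the removed modules, an in-process channel could forward calls straight to a local `Service` implementation:

```python
# Toy in-process RpcChannel: forwards every call to a local Service object.
from google.protobuf import service


class LocalChannel(service.RpcChannel):
    """Dispatches CallMethod directly to an in-process service instance."""

    def __init__(self, local_service):
        self._local_service = local_service

    def CallMethod(self, method_descriptor, rpc_controller,
                   request, response_class, done):
        # Mirrors Service.CallMethod(); the response class is implied by the
        # method descriptor, so it is not needed for local dispatch.
        return self._local_service.CallMethod(
            method_descriptor, rpc_controller, request, done)
```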
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains metaclasses used to create protocol service and service stub -classes from ServiceDescriptor objects at runtime. - -The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to -inject all useful functionality into the classes output by the protocol -compiler at compile-time. -""" - -__author__ = 'petar@google.com (Petar Petrov)' - - -class GeneratedServiceType(type): - - """Metaclass for service classes created at runtime from ServiceDescriptors. - - Implementations for all methods described in the Service class are added here - by this class. We also create properties to allow getting/setting all fields - in the protocol message. - - The protocol compiler currently uses this metaclass to create protocol service - classes at runtime. Clients can also manually create their own classes at - runtime, as in this example:: - - mydescriptor = ServiceDescriptor(.....) - class MyProtoService(service.Service): - __metaclass__ = GeneratedServiceType - DESCRIPTOR = mydescriptor - myservice_instance = MyProtoService() - # ... - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service class. - - Args: - name: Name of the class (ignored, but required by the metaclass - protocol). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service class is subclassed. - if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] - service_builder = _ServiceBuilder(descriptor) - service_builder.BuildService(cls) - cls.DESCRIPTOR = descriptor - - -class GeneratedServiceStubType(GeneratedServiceType): - - """Metaclass for service stubs created at runtime from ServiceDescriptors. - - This class has similar responsibilities as GeneratedServiceType, except that - it creates the service stub classes. - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service stub class. - - Args: - name: Name of the class (ignored, here). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service stub is subclassed. 
- if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] - service_stub_builder = _ServiceStubBuilder(descriptor) - service_stub_builder.BuildServiceStub(cls) - - -class _ServiceBuilder(object): - - """This class constructs a protocol service class using a service descriptor. - - Given a service descriptor, this class constructs a class that represents - the specified service descriptor. One service builder instance constructs - exactly one service class. That means all instances of that class share the - same builder. - """ - - def __init__(self, service_descriptor): - """Initializes an instance of the service class builder. - - Args: - service_descriptor: ServiceDescriptor to use when constructing the - service class. - """ - self.descriptor = service_descriptor - - def BuildService(builder, cls): - """Constructs the service class. - - Args: - cls: The class that will be constructed. - """ - - # CallMethod needs to operate with an instance of the Service class. This - # internal wrapper function exists only to be able to pass the service - # instance to the method that does the real CallMethod work. - # Making sure to use exact argument names from the abstract interface in - # service.py to match the type signature - def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): - return builder._CallMethod(self, method_descriptor, rpc_controller, - request, done) - - def _WrapGetRequestClass(self, method_descriptor): - return builder._GetRequestClass(method_descriptor) - - def _WrapGetResponseClass(self, method_descriptor): - return builder._GetResponseClass(method_descriptor) - - builder.cls = cls - cls.CallMethod = _WrapCallMethod - cls.GetDescriptor = staticmethod(lambda: builder.descriptor) - cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' - cls.GetRequestClass = _WrapGetRequestClass - cls.GetResponseClass = _WrapGetResponseClass - for method in builder.descriptor.methods: - setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) - - def _CallMethod(self, srvc, method_descriptor, - rpc_controller, request, callback): - """Calls the method described by a given method descriptor. - - Args: - srvc: Instance of the service for which this method is called. - method_descriptor: Descriptor that represent the method to call. - rpc_controller: RPC controller to use for this method's execution. - request: Request protocol message. - callback: A callback to invoke after the method has completed. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'CallMethod() given method descriptor for wrong service type.') - method = getattr(srvc, method_descriptor.name) - return method(rpc_controller, request, callback) - - def _GetRequestClass(self, method_descriptor): - """Returns the class of the request protocol message. - - Args: - method_descriptor: Descriptor of the method for which to return the - request protocol message class. - - Returns: - A class that represents the input protocol message of the specified - method. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'GetRequestClass() given method descriptor for wrong service type.') - return method_descriptor.input_type._concrete_class - - def _GetResponseClass(self, method_descriptor): - """Returns the class of the response protocol message. 
- - Args: - method_descriptor: Descriptor of the method for which to return the - response protocol message class. - - Returns: - A class that represents the output protocol message of the specified - method. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'GetResponseClass() given method descriptor for wrong service type.') - return method_descriptor.output_type._concrete_class - - def _GenerateNonImplementedMethod(self, method): - """Generates and returns a method that can be set for a service methods. - - Args: - method: Descriptor of the service method for which a method is to be - generated. - - Returns: - A method that can be added to the service class. - """ - return lambda inst, rpc_controller, request, callback: ( - self._NonImplementedMethod(method.name, rpc_controller, callback)) - - def _NonImplementedMethod(self, method_name, rpc_controller, callback): - """The body of all methods in the generated service class. - - Args: - method_name: Name of the method being executed. - rpc_controller: RPC controller used to execute this method. - callback: A callback which will be invoked when the method finishes. - """ - rpc_controller.SetFailed('Method %s not implemented.' % method_name) - callback(None) - - -class _ServiceStubBuilder(object): - - """Constructs a protocol service stub class using a service descriptor. - - Given a service descriptor, this class constructs a suitable stub class. - A stub is just a type-safe wrapper around an RpcChannel which emulates a - local implementation of the service. - - One service stub builder instance constructs exactly one class. It means all - instances of that class share the same service stub builder. - """ - - def __init__(self, service_descriptor): - """Initializes an instance of the service stub class builder. - - Args: - service_descriptor: ServiceDescriptor to use when constructing the - stub class. - """ - self.descriptor = service_descriptor - - def BuildServiceStub(self, cls): - """Constructs the stub class. - - Args: - cls: The class that will be constructed. - """ - - def _ServiceStubInit(stub, rpc_channel): - stub.rpc_channel = rpc_channel - self.cls = cls - cls.__init__ = _ServiceStubInit - for method in self.descriptor.methods: - setattr(cls, method.name, self._GenerateStubMethod(method)) - - def _GenerateStubMethod(self, method): - return (lambda inst, rpc_controller, request, callback=None: - self._StubMethod(inst, method, rpc_controller, request, callback)) - - def _StubMethod(self, stub, method_descriptor, - rpc_controller, request, callback): - """The body of all service methods in the generated stub class. - - Args: - stub: Stub instance. - method_descriptor: Descriptor of the invoked method. - rpc_controller: Rpc controller to execute the method. - request: Request protocol message. - callback: A callback to execute when the method finishes. - Returns: - Response message (in case of blocking call). - """ - return stub.rpc_channel.CallMethod( - method_descriptor, rpc_controller, request, - method_descriptor.output_type._concrete_class, callback) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/source_context_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/source_context_pb2.py deleted file mode 100644 index 30cca2e06e..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/source_context_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/source_context.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SOURCECONTEXT._serialized_start=57 - _SOURCECONTEXT._serialized_end=91 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/struct_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/struct_pb2.py deleted file mode 100644 index 149728ca08..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/struct_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/struct.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _STRUCT_FIELDSENTRY._options = None - _STRUCT_FIELDSENTRY._serialized_options = b'8\001' - _NULLVALUE._serialized_start=474 - _NULLVALUE._serialized_end=501 - _STRUCT._serialized_start=50 - _STRUCT._serialized_end=182 - _STRUCT_FIELDSENTRY._serialized_start=113 - _STRUCT_FIELDSENTRY._serialized_end=182 - _VALUE._serialized_start=185 - _VALUE._serialized_end=419 - _LISTVALUE._serialized_start=421 - _LISTVALUE._serialized_end=472 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/symbol_database.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/symbol_database.py deleted file mode 100644 index fdcf8cf06c..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/symbol_database.py +++ /dev/null @@ -1,194 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""A database of Python protocol buffer generated symbols. - -SymbolDatabase is the MessageFactory for messages generated at compile time, -and makes it easy to create new instances of a registered type, given only the -type's protocol buffer symbol name. - -Example usage:: - - db = symbol_database.SymbolDatabase() - - # Register symbols of interest, from one or multiple files. 
- db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) - db.RegisterMessage(my_proto_pb2.MyMessage) - db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) - - # The database can be used as a MessageFactory, to generate types based on - # their name: - types = db.GetMessages(['my_proto.proto']) - my_message_instance = types['MyMessage']() - - # The database's underlying descriptor pool can be queried, so it's not - # necessary to know a type's filename to be able to generate it: - filename = db.pool.FindFileContainingSymbol('MyMessage') - my_message_instance = db.GetMessages([filename])['MyMessage']() - - # This functionality is also provided directly via a convenience method: - my_message_instance = db.GetSymbol('MyMessage')() -""" - - -from google.protobuf.internal import api_implementation -from google.protobuf import descriptor_pool -from google.protobuf import message_factory - - -class SymbolDatabase(message_factory.MessageFactory): - """A database of Python generated symbols.""" - - def RegisterMessage(self, message): - """Registers the given message type in the local database. - - Calls to GetSymbol() and GetMessages() will return messages registered here. - - Args: - message: A :class:`google.protobuf.message.Message` subclass (or - instance); its descriptor will be registered. - - Returns: - The provided message. - """ - - desc = message.DESCRIPTOR - self._classes[desc] = message - self.RegisterMessageDescriptor(desc) - return message - - def RegisterMessageDescriptor(self, message_descriptor): - """Registers the given message descriptor in the local database. - - Args: - message_descriptor (Descriptor): the message descriptor to add. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddDescriptor(message_descriptor) - - def RegisterEnumDescriptor(self, enum_descriptor): - """Registers the given enum descriptor in the local database. - - Args: - enum_descriptor (EnumDescriptor): The enum descriptor to register. - - Returns: - EnumDescriptor: The provided descriptor. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddEnumDescriptor(enum_descriptor) - return enum_descriptor - - def RegisterServiceDescriptor(self, service_descriptor): - """Registers the given service descriptor in the local database. - - Args: - service_descriptor (ServiceDescriptor): the service descriptor to - register. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddServiceDescriptor(service_descriptor) - - def RegisterFileDescriptor(self, file_descriptor): - """Registers the given file descriptor in the local database. - - Args: - file_descriptor (FileDescriptor): The file descriptor to register. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._InternalAddFileDescriptor(file_descriptor) - - def GetSymbol(self, symbol): - """Tries to find a symbol in the local database. - - Currently, this method only returns message.Message instances, however, if - may be extended in future to support other symbol types. - - Args: - symbol (str): a protocol buffer symbol. - - Returns: - A Python class corresponding to the symbol. - - Raises: - KeyError: if the symbol could not be found. - """ - - return self._classes[self.pool.FindMessageTypeByName(symbol)] - - def GetMessages(self, files): - # TODO(amauryfa): Fix the differences with MessageFactory. - """Gets all registered messages from a specified file. 
- - Only messages already created and registered will be returned; (this is the - case for imported _pb2 modules) - But unlike MessageFactory, this version also returns already defined nested - messages, but does not register any message extensions. - - Args: - files (list[str]): The file names to extract messages from. - - Returns: - A dictionary mapping proto names to the message classes. - - Raises: - KeyError: if a file could not be found. - """ - - def _GetAllMessages(desc): - """Walk a message Descriptor and recursively yields all message names.""" - yield desc - for msg_desc in desc.nested_types: - for nested_desc in _GetAllMessages(msg_desc): - yield nested_desc - - result = {} - for file_name in files: - file_desc = self.pool.FindFileByName(file_name) - for msg_desc in file_desc.message_types_by_name.values(): - for desc in _GetAllMessages(msg_desc): - try: - result[desc.full_name] = self._classes[desc] - except KeyError: - # This descriptor has no registered class, skip it. - pass - return result - - -_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) - - -def Default(): - """Returns the default SymbolDatabase.""" - return _DEFAULT diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_encoding.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_encoding.py deleted file mode 100644 index 759cf11f62..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_encoding.py +++ /dev/null @@ -1,110 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -"""Encoding related utilities.""" -import re - -_cescape_chr_to_symbol_map = {} -_cescape_chr_to_symbol_map[9] = r'\t' # optional escape -_cescape_chr_to_symbol_map[10] = r'\n' # optional escape -_cescape_chr_to_symbol_map[13] = r'\r' # optional escape -_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape -_cescape_chr_to_symbol_map[39] = r"\'" # optional escape -_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape - -# Lookup table for unicode -_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] -for byte, string in _cescape_chr_to_symbol_map.items(): - _cescape_unicode_to_str[byte] = string - -# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) -_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + - [chr(i) for i in range(32, 127)] + - [r'\%03o' % i for i in range(127, 256)]) -for byte, string in _cescape_chr_to_symbol_map.items(): - _cescape_byte_to_str[byte] = string -del byte, string - - -def CEscape(text, as_utf8): - # type: (...) -> str - """Escape a bytes string for use in an text protocol buffer. - - Args: - text: A byte string to be escaped. - as_utf8: Specifies if result may contain non-ASCII characters. - In Python 3 this allows unescaped non-ASCII Unicode characters. - In Python 2 the return value will be valid UTF-8 rather than only ASCII. - Returns: - Escaped string (str). - """ - # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not - # satisfy our needs; they encodes unprintable characters using two-digit hex - # escapes whereas our C++ unescaping function allows hex escapes to be any - # length. So, "\0011".encode('string_escape') ends up being "\\x011", which - # will be decoded in C++ as a single-character string with char code 0x11. - text_is_unicode = isinstance(text, str) - if as_utf8 and text_is_unicode: - # We're already unicode, no processing beyond control char escapes. - return text.translate(_cescape_chr_to_symbol_map) - ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. - if as_utf8: - return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) - return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) - - -_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') - - -def CUnescape(text): - # type: (str) -> bytes - """Unescape a text string with C-style escape sequences to UTF-8 bytes. - - Args: - text: The data to parse in a str. - Returns: - A byte string. - """ - - def ReplaceHex(m): - # Only replace the match if the number of leading back slashes is odd. i.e. - # the slash itself is not escaped. - if len(m.group(1)) & 1: - return m.group(1) + 'x0' + m.group(2) - return m.group(0) - - # This is required because the 'string_escape' encoding doesn't - # allow single-digit hex escapes (like '\xf'). - result = _CUNESCAPE_HEX.sub(ReplaceHex, text) - - return (result.encode('utf-8') # Make it bytes to allow decode. - .decode('unicode_escape') - # Make it bytes again to return the proper type. - .encode('raw_unicode_escape')) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_format.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_format.py deleted file mode 100644 index 412385c26f..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/text_format.py +++ /dev/null @@ -1,1795 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in text format. - -Simple usage example:: - - # Create a proto object and serialize it to a text proto string. - message = my_proto_pb2.MyMessage(foo='bar') - text_proto = text_format.MessageToString(message) - - # Parse a text proto string. - message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) -""" - -__author__ = 'kenton@google.com (Kenton Varda)' - -# TODO(b/129989314) Import thread contention leads to test failures. 
-import encodings.raw_unicode_escape # pylint: disable=unused-import -import encodings.unicode_escape # pylint: disable=unused-import -import io -import math -import re - -from google.protobuf.internal import decoder -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import text_encoding - -# pylint: disable=g-import-not-at-top -__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', - 'PrintFieldValue', 'Merge', 'MessageToBytes'] - -_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), - type_checkers.Int32ValueChecker(), - type_checkers.Uint64ValueChecker(), - type_checkers.Int64ValueChecker()) -_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) -_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) -_QUOTES = frozenset(("'", '"')) -_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' - - -class Error(Exception): - """Top-level module error for text_format.""" - - -class ParseError(Error): - """Thrown in case of text parsing or tokenizing error.""" - - def __init__(self, message=None, line=None, column=None): - if message is not None and line is not None: - loc = str(line) - if column is not None: - loc += ':{0}'.format(column) - message = '{0} : {1}'.format(loc, message) - if message is not None: - super(ParseError, self).__init__(message) - else: - super(ParseError, self).__init__() - self._line = line - self._column = column - - def GetLine(self): - return self._line - - def GetColumn(self): - return self._column - - -class TextWriter(object): - - def __init__(self, as_utf8): - self._writer = io.StringIO() - - def write(self, val): - return self._writer.write(val) - - def close(self): - return self._writer.close() - - def getvalue(self): - return self._writer.getvalue() - - -def MessageToString( - message, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - indent=0, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - # type: (...) -> str - """Convert protobuf message to text format. - - Double values can be formatted compactly with 15 digits of - precision (which is the most that IEEE 754 "double" can guarantee) - using double_format='.15g'. To ensure that converting to text and back to a - proto will result in an identical value, double_format='.17g' should be used. - - Args: - message: The protocol buffers message. - as_utf8: Return unescaped Unicode for non-ASCII characters. - In Python 3 actual Unicode characters may appear as is in strings. - In Python 2 the return value will be valid UTF-8 rather than only ASCII. - as_one_line: Don't introduce newlines between fields. - use_short_repeated_primitives: Use short repeated format for primitives. - pointy_brackets: If True, use angle brackets instead of curly braces for - nesting. - use_index_order: If True, fields of a proto message will be printed using - the order defined in source code instead of the field number, extensions - will be printed at the end of the message and their relative order is - determined by the extension number. By default, use the field number - order. - float_format (str): If set, use this to specify float field formatting - (per the "Format Specification Mini-Language"); otherwise, shortest float - that has same value in wire will be printed. Also affect double field - if double_format is not set but float_format is set. 
- double_format (str): If set, use this to specify double field formatting - (per the "Format Specification Mini-Language"); if it is not set but - float_format is set, use float_format. Otherwise, use ``str()`` - use_field_number: If True, print field numbers instead of names. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - indent (int): The initial indent level, in terms of spaces, for pretty - print. - message_formatter (function(message, indent, as_one_line) -> unicode|None): - Custom formatter for selected sub-messages (usually based on message - type). Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if the - field is a proto message. - - Returns: - str: A string of the text formatted protocol buffer message. - """ - out = TextWriter(as_utf8) - printer = _Printer( - out, - indent, - as_utf8, - as_one_line, - use_short_repeated_primitives, - pointy_brackets, - use_index_order, - float_format, - double_format, - use_field_number, - descriptor_pool, - message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - result = out.getvalue() - out.close() - if as_one_line: - return result.rstrip() - return result - - -def MessageToBytes(message, **kwargs): - # type: (...) -> bytes - """Convert protobuf message to encoded text format. See MessageToString.""" - text = MessageToString(message, **kwargs) - if isinstance(text, bytes): - return text - codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' - return text.encode(codec) - - -def _IsMapEntry(field): - return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def PrintMessage(message, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - printer = _Printer( - out=out, indent=indent, as_utf8=as_utf8, - as_one_line=as_one_line, - use_short_repeated_primitives=use_short_repeated_primitives, - pointy_brackets=pointy_brackets, - use_index_order=use_index_order, - float_format=float_format, - double_format=double_format, - use_field_number=use_field_number, - descriptor_pool=descriptor_pool, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - - -def PrintField(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Print a single field name/value pair.""" - printer = _Printer(out, indent, as_utf8, as_one_line, - use_short_repeated_primitives, pointy_brackets, - use_index_order, float_format, double_format, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintField(field, value) - - -def PrintFieldValue(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, 
- float_format=None, - double_format=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Print a single field value (not including name).""" - printer = _Printer(out, indent, as_utf8, as_one_line, - use_short_repeated_primitives, pointy_brackets, - use_index_order, float_format, double_format, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintFieldValue(field, value) - - -def _BuildMessageFromTypeName(type_name, descriptor_pool): - """Returns a protobuf message instance. - - Args: - type_name: Fully-qualified protobuf message type name string. - descriptor_pool: DescriptorPool instance. - - Returns: - A Message instance of type matching type_name, or None if the a Descriptor - wasn't found matching type_name. - """ - # pylint: disable=g-import-not-at-top - if descriptor_pool is None: - from google.protobuf import descriptor_pool as pool_mod - descriptor_pool = pool_mod.Default() - from google.protobuf import symbol_database - database = symbol_database.Default() - try: - message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) - except KeyError: - return None - message_type = database.GetPrototype(message_descriptor) - return message_type() - - -# These values must match WireType enum in google/protobuf/wire_format.h. -WIRETYPE_LENGTH_DELIMITED = 2 -WIRETYPE_START_GROUP = 3 - - -class _Printer(object): - """Text format printer for protocol message.""" - - def __init__( - self, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Initialize the Printer. - - Double values can be formatted compactly with 15 digits of precision - (which is the most that IEEE 754 "double" can guarantee) using - double_format='.15g'. To ensure that converting to text and back to a proto - will result in an identical value, double_format='.17g' should be used. - - Args: - out: To record the text format result. - indent: The initial indent level for pretty print. - as_utf8: Return unescaped Unicode for non-ASCII characters. - In Python 3 actual Unicode characters may appear as is in strings. - In Python 2 the return value will be valid UTF-8 rather than ASCII. - as_one_line: Don't introduce newlines between fields. - use_short_repeated_primitives: Use short repeated format for primitives. - pointy_brackets: If True, use angle brackets instead of curly braces for - nesting. - use_index_order: If True, print fields of a proto message using the order - defined in source code instead of the field number. By default, use the - field number order. - float_format: If set, use this to specify float field formatting - (per the "Format Specification Mini-Language"); otherwise, shortest - float that has same value in wire will be printed. Also affect double - field if double_format is not set but float_format is set. - double_format: If set, use this to specify double field formatting - (per the "Format Specification Mini-Language"); if it is not set but - float_format is set, use float_format. Otherwise, str() is used. - use_field_number: If True, print field numbers instead of names. - descriptor_pool: A DescriptorPool used to resolve Any types. 
- message_formatter: A function(message, indent, as_one_line): unicode|None - to custom format selected sub-messages (usually based on message type). - Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if - the field is a proto message. - """ - self.out = out - self.indent = indent - self.as_utf8 = as_utf8 - self.as_one_line = as_one_line - self.use_short_repeated_primitives = use_short_repeated_primitives - self.pointy_brackets = pointy_brackets - self.use_index_order = use_index_order - self.float_format = float_format - if double_format is not None: - self.double_format = double_format - else: - self.double_format = float_format - self.use_field_number = use_field_number - self.descriptor_pool = descriptor_pool - self.message_formatter = message_formatter - self.print_unknown_fields = print_unknown_fields - self.force_colon = force_colon - - def _TryPrintAsAnyMessage(self, message): - """Serializes if message is a google.protobuf.Any field.""" - if '/' not in message.type_url: - return False - packed_message = _BuildMessageFromTypeName(message.TypeName(), - self.descriptor_pool) - if packed_message: - packed_message.MergeFromString(message.value) - colon = ':' if self.force_colon else '' - self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) - self._PrintMessageFieldValue(packed_message) - self.out.write(' ' if self.as_one_line else '\n') - return True - else: - return False - - def _TryCustomFormatMessage(self, message): - formatted = self.message_formatter(message, self.indent, self.as_one_line) - if formatted is None: - return False - - out = self.out - out.write(' ' * self.indent) - out.write(formatted) - out.write(' ' if self.as_one_line else '\n') - return True - - def PrintMessage(self, message): - """Convert protobuf message to text format. - - Args: - message: The protocol buffers message. - """ - if self.message_formatter and self._TryCustomFormatMessage(message): - return - if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and - self._TryPrintAsAnyMessage(message)): - return - fields = message.ListFields() - if self.use_index_order: - fields.sort( - key=lambda x: x[0].number if x[0].is_extension else x[0].index) - for field, value in fields: - if _IsMapEntry(field): - for key in sorted(value): - # This is slow for maps with submessage entries because it copies the - # entire tree. Unfortunately this would take significant refactoring - # of this file to work around. - # - # TODO(haberman): refactor and optimize if this becomes an issue. 
- entry_submsg = value.GetEntryClass()(key=key, value=value[key]) - self.PrintField(field, entry_submsg) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if (self.use_short_repeated_primitives - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): - self._PrintShortRepeatedPrimitivesValue(field, value) - else: - for element in value: - self.PrintField(field, element) - else: - self.PrintField(field, value) - - if self.print_unknown_fields: - self._PrintUnknownFields(message.UnknownFields()) - - def _PrintUnknownFields(self, unknown_fields): - """Print unknown fields.""" - out = self.out - for field in unknown_fields: - out.write(' ' * self.indent) - out.write(str(field.field_number)) - if field.wire_type == WIRETYPE_START_GROUP: - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(field.data) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: - try: - # If this field is parseable as a Message, it is probably - # an embedded message. - # pylint: disable=protected-access - (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( - memoryview(field.data), 0, len(field.data)) - except Exception: # pylint: disable=broad-except - pos = 0 - - if pos == len(field.data): - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(embedded_unknown_message) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - else: - # A string or bytes field. self.as_utf8 may not work. - out.write(': \"') - out.write(text_encoding.CEscape(field.data, False)) - out.write('\" ' if self.as_one_line else '\"\n') - else: - # varint, fixed32, fixed64 - out.write(': ') - out.write(str(field.data)) - out.write(' ' if self.as_one_line else '\n') - - def _PrintFieldName(self, field): - """Print field name.""" - out = self.out - out.write(' ' * self.indent) - if self.use_field_number: - out.write(str(field.number)) - else: - if field.is_extension: - out.write('[') - if (field.containing_type.GetOptions().message_set_wire_format and - field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): - out.write(field.message_type.full_name) - else: - out.write(field.full_name) - out.write(']') - elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: - # For groups, use the capitalized name. - out.write(field.message_type.name) - else: - out.write(field.name) - - if (self.force_colon or - field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): - # The colon is optional in this case, but our cross-language golden files - # don't include it. Here, the colon is only included if force_colon is - # set to True - out.write(':') - - def PrintField(self, field, value): - """Print a single field name/value pair.""" - self._PrintFieldName(field) - self.out.write(' ') - self.PrintFieldValue(field, value) - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintShortRepeatedPrimitivesValue(self, field, value): - """"Prints short repeated primitives value.""" - # Note: this is called only when value has at least one element. 
- self._PrintFieldName(field) - self.out.write(' [') - for i in range(len(value) - 1): - self.PrintFieldValue(field, value[i]) - self.out.write(', ') - self.PrintFieldValue(field, value[-1]) - self.out.write(']') - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintMessageFieldValue(self, value): - if self.pointy_brackets: - openb = '<' - closeb = '>' - else: - openb = '{' - closeb = '}' - - if self.as_one_line: - self.out.write('%s ' % openb) - self.PrintMessage(value) - self.out.write(closeb) - else: - self.out.write('%s\n' % openb) - self.indent += 2 - self.PrintMessage(value) - self.indent -= 2 - self.out.write(' ' * self.indent + closeb) - - def PrintFieldValue(self, field, value): - """Print a single field value (not including name). - - For repeated fields, the value should be a single element. - - Args: - field: The descriptor of the field to be printed. - value: The value of the field. - """ - out = self.out - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self._PrintMessageFieldValue(value) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - enum_value = field.enum_type.values_by_number.get(value, None) - if enum_value is not None: - out.write(enum_value.name) - else: - out.write(str(value)) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - out.write('\"') - if isinstance(value, str) and not self.as_utf8: - out_value = value.encode('utf-8') - else: - out_value = value - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - # We always need to escape all binary data in TYPE_BYTES fields. - out_as_utf8 = False - else: - out_as_utf8 = self.as_utf8 - out.write(text_encoding.CEscape(out_value, out_as_utf8)) - out.write('\"') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - if value: - out.write('true') - else: - out.write('false') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - if self.float_format is not None: - out.write('{1:{0}}'.format(self.float_format, value)) - else: - if math.isnan(value): - out.write(str(value)) - else: - out.write(str(type_checkers.ToShortestFloat(value))) - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and - self.double_format is not None): - out.write('{1:{0}}'.format(self.double_format, value)) - else: - out.write(str(value)) - - -def Parse(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - NOTE: for historical reasons this function does not clear the input - message. This is different from what the binary msg.ParseFrom(...) does. - If text contains a field already set in message, the value is appended if the - field is repeated. Otherwise, an error is raised. - - Example:: - - a = MyProto() - a.repeated_field.append('test') - b = MyProto() - - # Repeated fields are combined - text_format.Parse(repr(a), b) - text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] - - # Non-repeated fields cannot be overwritten - a.singular_field = 1 - b.singular_field = 2 - text_format.Parse(repr(a), b) # ParseError - - # Binary version: - b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" - - Caller is responsible for clearing the message as needed. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def Merge(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - Like Parse(), but allows repeated values for a non-repeated field, and uses - the last one. This means any non-repeated, top-level fields specified in text - replace those in the message. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return MergeLines( - text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def ParseLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Parse() for caveats. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.ParseLines(lines, message) - - -def MergeLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Merge() for more details. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.MergeLines(lines, message) - - -class _Parser(object): - """Text format parser for protocol message.""" - - def __init__(self, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - self.allow_unknown_extension = allow_unknown_extension - self.allow_field_number = allow_field_number - self.descriptor_pool = descriptor_pool - self.allow_unknown_field = allow_unknown_field - - def ParseLines(self, lines, message): - """Parses a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = False - self._ParseOrMerge(lines, message) - return message - - def MergeLines(self, lines, message): - """Merges a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = True - self._ParseOrMerge(lines, message) - return message - - def _ParseOrMerge(self, lines, message): - """Converts a text representation of a protocol message into a message. - - Args: - lines: Lines of a message's text representation. - message: A protocol buffer message to merge into. - - Raises: - ParseError: On text parsing problems. - """ - # Tokenize expects native str lines. - str_lines = ( - line if isinstance(line, str) else line.decode('utf-8') - for line in lines) - tokenizer = Tokenizer(str_lines) - while not tokenizer.AtEnd(): - self._MergeField(tokenizer, message) - - def _MergeField(self, tokenizer, message): - """Merges a single protocol message field into a message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - message: A protocol message to record the data. - - Raises: - ParseError: In case of text parsing problems. - """ - message_descriptor = message.DESCRIPTOR - if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and - tokenizer.TryConsume('[')): - type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) - tokenizer.Consume(']') - tokenizer.TryConsume(':') - if tokenizer.TryConsume('<'): - expanded_any_end_token = '>' - else: - tokenizer.Consume('{') - expanded_any_end_token = '}' - expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, - self.descriptor_pool) - if not expanded_any_sub_message: - raise ParseError('Type %s not found in descriptor pool' % - packed_type_name) - while not tokenizer.TryConsume(expanded_any_end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% - (expanded_any_end_token,)) - self._MergeField(tokenizer, expanded_any_sub_message) - deterministic = False - - message.Pack(expanded_any_sub_message, - type_url_prefix=type_url_prefix, - deterministic=deterministic) - return - - if tokenizer.TryConsume('['): - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - name = '.'.join(name) - - if not message_descriptor.is_extendable: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" does not have extensions.' % - message_descriptor.full_name) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(name) - # pylint: enable=protected-access - - - if not field: - if self.allow_unknown_extension: - field = None - else: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" not registered. ' - 'Did you import the _pb2 module which defines it? ' - 'If you are trying to place the extension in the MessageSet ' - 'field of another message that is in an Any or MessageSet field, ' - 'that message\'s _pb2 module must be imported as well' % name) - elif message_descriptor != field.containing_type: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" does not extend message type "%s".' % - (name, message_descriptor.full_name)) - - tokenizer.Consume(']') - - else: - name = tokenizer.ConsumeIdentifierOrNumber() - if self.allow_field_number and name.isdigit(): - number = ParseInteger(name, True, True) - field = message_descriptor.fields_by_number.get(number, None) - if not field and message_descriptor.is_extendable: - field = message.Extensions._FindExtensionByNumber(number) - else: - field = message_descriptor.fields_by_name.get(name, None) - - # Group names are expected to be capitalized as they appear in the - # .proto file, which actually matches their type names, not their field - # names. - if not field: - field = message_descriptor.fields_by_name.get(name.lower(), None) - if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: - field = None - - if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and - field.message_type.name != name): - field = None - - if not field and not self.allow_unknown_field: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" has no field named "%s".' % - (message_descriptor.full_name, name)) - - if field: - if not self._allow_multiple_scalars and field.containing_oneof: - # Check if there's a different field set in this oneof. - # Note that we ignore the case if the same field was set before, and we - # apply _allow_multiple_scalars to non-scalar fields as well. - which_oneof = message.WhichOneof(field.containing_oneof.name) - if which_oneof is not None and which_oneof != field.name: - raise tokenizer.ParseErrorPreviousToken( - 'Field "%s" is specified along with field "%s", another member ' - 'of oneof "%s" for message type "%s".' % - (field.name, which_oneof, field.containing_oneof.name, - message_descriptor.full_name)) - - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - tokenizer.TryConsume(':') - merger = self._MergeMessageField - else: - tokenizer.Consume(':') - merger = self._MergeScalarField - - if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and - tokenizer.TryConsume('[')): - # Short repeated format, e.g. 
"foo: [1, 2, 3]" - if not tokenizer.TryConsume(']'): - while True: - merger(tokenizer, message, field) - if tokenizer.TryConsume(']'): - break - tokenizer.Consume(',') - - else: - merger(tokenizer, message, field) - - else: # Proto field is unknown. - assert (self.allow_unknown_extension or self.allow_unknown_field) - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - - def _ConsumeAnyTypeUrl(self, tokenizer): - """Consumes a google.protobuf.Any type URL and returns the type name.""" - # Consume "type.googleapis.com/". - prefix = [tokenizer.ConsumeIdentifier()] - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('/') - # Consume the fully-qualified type name. - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - return '.'.join(prefix), '.'.join(name) - - def _MergeMessageField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: The message of which field is a member. - field: The descriptor of the field to be merged. - - Raises: - ParseError: In case of text parsing problems. - """ - is_map_entry = _IsMapEntry(field) - - if tokenizer.TryConsume('<'): - end_token = '>' - else: - tokenizer.Consume('{') - end_token = '}' - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - sub_message = message.Extensions[field].add() - elif is_map_entry: - sub_message = getattr(message, field.name).GetEntryClass()() - else: - sub_message = getattr(message, field.name).add() - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - sub_message = message.Extensions[field] - else: - # Also apply _allow_multiple_scalars to message field. - # TODO(jieluo): Change to _allow_singular_overwrites. - if (not self._allow_multiple_scalars and - message.HasField(field.name)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - sub_message = getattr(message, field.name) - sub_message.SetInParent() - - while not tokenizer.TryConsume(end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) - self._MergeField(tokenizer, sub_message) - - if is_map_entry: - value_cpptype = field.message_type.fields_by_name['value'].cpp_type - if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - value = getattr(message, field.name)[sub_message.key] - value.CopyFrom(sub_message.value) - else: - getattr(message, field.name)[sub_message.key] = sub_message.value - - @staticmethod - def _IsProto3Syntax(message): - message_descriptor = message.DESCRIPTOR - return (hasattr(message_descriptor, 'syntax') and - message_descriptor.syntax == 'proto3') - - def _MergeScalarField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: A protocol message to record the data. - field: The descriptor of the field to be merged. 
- - Raises: - ParseError: In case of text parsing problems. - RuntimeError: On runtime errors. - """ - _ = self.allow_unknown_extension - value = None - - if field.type in (descriptor.FieldDescriptor.TYPE_INT32, - descriptor.FieldDescriptor.TYPE_SINT32, - descriptor.FieldDescriptor.TYPE_SFIXED32): - value = _ConsumeInt32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, - descriptor.FieldDescriptor.TYPE_SINT64, - descriptor.FieldDescriptor.TYPE_SFIXED64): - value = _ConsumeInt64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, - descriptor.FieldDescriptor.TYPE_FIXED32): - value = _ConsumeUint32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, - descriptor.FieldDescriptor.TYPE_FIXED64): - value = _ConsumeUint64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, - descriptor.FieldDescriptor.TYPE_DOUBLE): - value = tokenizer.ConsumeFloat() - elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: - value = tokenizer.ConsumeBool() - elif field.type == descriptor.FieldDescriptor.TYPE_STRING: - value = tokenizer.ConsumeString() - elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: - value = tokenizer.ConsumeByteString() - elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: - value = tokenizer.ConsumeEnum(field) - else: - raise RuntimeError('Unknown field type %d' % field.type) - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - message.Extensions[field].append(value) - else: - getattr(message, field.name).append(value) - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - not self._IsProto3Syntax(message) and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - else: - message.Extensions[field] = value - else: - duplicate_error = False - if not self._allow_multiple_scalars: - if self._IsProto3Syntax(message): - # Proto3 doesn't represent presence so we try best effort to check - # multiple scalars by compare to default values. - duplicate_error = bool(getattr(message, field.name)) - else: - duplicate_error = message.HasField(field.name) - - if duplicate_error: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - else: - setattr(message, field.name, value) - - -def _SkipFieldContents(tokenizer): - """Skips over contents (value or message) of a field. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - # Try to guess the type of this field. - # If this field is not a message, there should be a ":" between the - # field name and the field value and also the field value should not - # start with "{" or "<" which indicates the beginning of a message body. - # If there is no ":" or there is a "{" or "<" after ":", this field has - # to be a message or the input is ill-formed. - if tokenizer.TryConsume(':') and not tokenizer.LookingAt( - '{') and not tokenizer.LookingAt('<'): - _SkipFieldValue(tokenizer) - else: - _SkipFieldMessage(tokenizer) - - -def _SkipField(tokenizer): - """Skips over a complete field (name and value/message). - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - if tokenizer.TryConsume('['): - # Consume extension name. 
- tokenizer.ConsumeIdentifier() - while tokenizer.TryConsume('.'): - tokenizer.ConsumeIdentifier() - tokenizer.Consume(']') - else: - tokenizer.ConsumeIdentifierOrNumber() - - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - -def _SkipFieldMessage(tokenizer): - """Skips over a field message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - - if tokenizer.TryConsume('<'): - delimiter = '>' - else: - tokenizer.Consume('{') - delimiter = '}' - - while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): - _SkipField(tokenizer) - - tokenizer.Consume(delimiter) - - -def _SkipFieldValue(tokenizer): - """Skips over a field value. - - Args: - tokenizer: A tokenizer to parse the field name and values. - - Raises: - ParseError: In case an invalid field value is found. - """ - # String/bytes tokens can come in multiple adjacent string literals. - # If we can consume one, consume as many as we can. - if tokenizer.TryConsumeByteString(): - while tokenizer.TryConsumeByteString(): - pass - return - - if (not tokenizer.TryConsumeIdentifier() and - not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and - not tokenizer.TryConsumeFloat()): - raise ParseError('Invalid field value: ' + tokenizer.token) - - -class Tokenizer(object): - """Protocol buffer text representation tokenizer. - - This class handles the lower level string parsing by splitting it into - meaningful tokens. - - It was directly ported from the Java protocol buffer API. - """ - - _WHITESPACE = re.compile(r'\s+') - _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) - _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) - _TOKEN = re.compile('|'.join([ - r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier - r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number - ] + [ # quoted str for each quote mark - # Avoid backtracking! https://stackoverflow.com/a/844267 - r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) - for mark in _QUOTES - ])) - - _IDENTIFIER = re.compile(r'[^\d\W]\w*') - _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') - - def __init__(self, lines, skip_comments=True): - self._position = 0 - self._line = -1 - self._column = 0 - self._token_start = None - self.token = '' - self._lines = iter(lines) - self._current_line = '' - self._previous_line = 0 - self._previous_column = 0 - self._more_lines = True - self._skip_comments = skip_comments - self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT - or self._WHITESPACE) - self._SkipWhitespace() - self.NextToken() - - def LookingAt(self, token): - return self.token == token - - def AtEnd(self): - """Checks the end of the text was reached. - - Returns: - True iff the end was reached. - """ - return not self.token - - def _PopLine(self): - while len(self._current_line) <= self._column: - try: - self._current_line = next(self._lines) - except StopIteration: - self._current_line = '' - self._more_lines = False - return - else: - self._line += 1 - self._column = 0 - - def _SkipWhitespace(self): - while True: - self._PopLine() - match = self._whitespace_pattern.match(self._current_line, self._column) - if not match: - break - length = len(match.group(0)) - self._column += length - - def TryConsume(self, token): - """Tries to consume a given piece of text. - - Args: - token: Text to consume. - - Returns: - True iff the text was consumed. 
- """ - if self.token == token: - self.NextToken() - return True - return False - - def Consume(self, token): - """Consumes a piece of text. - - Args: - token: Text to consume. - - Raises: - ParseError: If the text couldn't be consumed. - """ - if not self.TryConsume(token): - raise self.ParseError('Expected "%s".' % token) - - def ConsumeComment(self): - result = self.token - if not self._COMMENT.match(result): - raise self.ParseError('Expected comment.') - self.NextToken() - return result - - def ConsumeCommentOrTrailingComment(self): - """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" - - # Tokenizer initializes _previous_line and _previous_column to 0. As the - # tokenizer starts, it looks like there is a previous token on the line. - just_started = self._line == 0 and self._column == 0 - - before_parsing = self._previous_line - comment = self.ConsumeComment() - - # A trailing comment is a comment on the same line than the previous token. - trailing = (self._previous_line == before_parsing - and not just_started) - - return trailing, comment - - def TryConsumeIdentifier(self): - try: - self.ConsumeIdentifier() - return True - except ParseError: - return False - - def ConsumeIdentifier(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER.match(result): - raise self.ParseError('Expected identifier.') - self.NextToken() - return result - - def TryConsumeIdentifierOrNumber(self): - try: - self.ConsumeIdentifierOrNumber() - return True - except ParseError: - return False - - def ConsumeIdentifierOrNumber(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER_OR_NUMBER.match(result): - raise self.ParseError('Expected identifier or number, got %s.' % result) - self.NextToken() - return result - - def TryConsumeInteger(self): - try: - self.ConsumeInteger() - return True - except ParseError: - return False - - def ConsumeInteger(self): - """Consumes an integer number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer couldn't be consumed. - """ - try: - result = _ParseAbstractInteger(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeFloat(self): - try: - self.ConsumeFloat() - return True - except ParseError: - return False - - def ConsumeFloat(self): - """Consumes an floating point number. - - Returns: - The number parsed. - - Raises: - ParseError: If a floating point number couldn't be consumed. - """ - try: - result = ParseFloat(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeBool(self): - """Consumes a boolean value. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - try: - result = ParseBool(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeByteString(self): - try: - self.ConsumeByteString() - return True - except ParseError: - return False - - def ConsumeString(self): - """Consumes a string value. - - Returns: - The string parsed. - - Raises: - ParseError: If a string value couldn't be consumed. 
- """ - the_bytes = self.ConsumeByteString() - try: - return str(the_bytes, 'utf-8') - except UnicodeDecodeError as e: - raise self._StringParseError(e) - - def ConsumeByteString(self): - """Consumes a byte array value. - - Returns: - The array parsed (as a string). - - Raises: - ParseError: If a byte array value couldn't be consumed. - """ - the_list = [self._ConsumeSingleByteString()] - while self.token and self.token[0] in _QUOTES: - the_list.append(self._ConsumeSingleByteString()) - return b''.join(the_list) - - def _ConsumeSingleByteString(self): - """Consume one token of a string literal. - - String literals (whether bytes or text) can come in multiple adjacent - tokens which are automatically concatenated, like in C or Python. This - method only consumes one token. - - Returns: - The token parsed. - Raises: - ParseError: When the wrong format data is found. - """ - text = self.token - if len(text) < 1 or text[0] not in _QUOTES: - raise self.ParseError('Expected string but found: %r' % (text,)) - - if len(text) < 2 or text[-1] != text[0]: - raise self.ParseError('String missing ending quote: %r' % (text,)) - - try: - result = text_encoding.CUnescape(text[1:-1]) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeEnum(self, field): - try: - result = ParseEnum(field, self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ParseErrorPreviousToken(self, message): - """Creates and *returns* a ParseError for the previously read token. - - Args: - message: A message to set for the exception. - - Returns: - A ParseError instance. - """ - return ParseError(message, self._previous_line + 1, - self._previous_column + 1) - - def ParseError(self, message): - """Creates and *returns* a ParseError for the current token.""" - return ParseError('\'' + self._current_line + '\': ' + message, - self._line + 1, self._column + 1) - - def _StringParseError(self, e): - return self.ParseError('Couldn\'t parse string: ' + str(e)) - - def NextToken(self): - """Reads the next meaningful token.""" - self._previous_line = self._line - self._previous_column = self._column - - self._column += len(self.token) - self._SkipWhitespace() - - if not self._more_lines: - self.token = '' - return - - match = self._TOKEN.match(self._current_line, self._column) - if not match and not self._skip_comments: - match = self._COMMENT.match(self._current_line, self._column) - if match: - token = match.group(0) - self.token = token - else: - self.token = self._current_line[self._column] - -# Aliased so it can still be accessed by current visibility violators. -# TODO(dbarnett): Migrate violators to textformat_tokenizer. -_Tokenizer = Tokenizer # pylint: disable=invalid-name - - -def _ConsumeInt32(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) - - -def _ConsumeUint32(tokenizer): - """Consumes an unsigned 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 32bit integer couldn't be consumed. 
- """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) - - -def _TryConsumeInt64(tokenizer): - try: - _ConsumeInt64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeInt64(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) - - -def _TryConsumeUint64(tokenizer): - try: - _ConsumeUint64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeUint64(tokenizer): - """Consumes an unsigned 64bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 64bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) - - -def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): - """Consumes an integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer with given characteristics couldn't be consumed. - """ - try: - result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) - except ValueError as e: - raise tokenizer.ParseError(str(e)) - tokenizer.NextToken() - return result - - -def ParseInteger(text, is_signed=False, is_long=False): - """Parses an integer. - - Args: - text: The text to parse. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - result = _ParseAbstractInteger(text) - - # Check if the integer is sane. Exceptions handled by callers. - checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] - checker.CheckValue(result) - return result - - -def _ParseAbstractInteger(text): - """Parses an integer without checking size/signedness. - - Args: - text: The text to parse. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - orig_text = text - c_octal_match = re.match(r'(-?)0(\d+)$', text) - if c_octal_match: - # Python 3 no longer supports 0755 octal syntax without the 'o', so - # we always use the '0o' prefix for multi-digit numbers starting with 0. - text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) - try: - return int(text, 0) - except ValueError: - raise ValueError('Couldn\'t parse integer: %s' % orig_text) - - -def ParseFloat(text): - """Parse a floating point number. - - Args: - text: Text to parse. - - Returns: - The number parsed. - - Raises: - ValueError: If a floating point number couldn't be parsed. - """ - try: - # Assume Python compatible syntax. - return float(text) - except ValueError: - # Check alternative spellings. 
- if _FLOAT_INFINITY.match(text): - if text[0] == '-': - return float('-inf') - else: - return float('inf') - elif _FLOAT_NAN.match(text): - return float('nan') - else: - # assume '1.0f' format - try: - return float(text.rstrip('f')) - except ValueError: - raise ValueError('Couldn\'t parse float: %s' % text) - - -def ParseBool(text): - """Parse a boolean value. - - Args: - text: Text to parse. - - Returns: - Boolean values parsed - - Raises: - ValueError: If text is not a valid boolean. - """ - if text in ('true', 't', '1', 'True'): - return True - elif text in ('false', 'f', '0', 'False'): - return False - else: - raise ValueError('Expected "true" or "false".') - - -def ParseEnum(field, value): - """Parse an enum value. - - The value can be specified by a number (the enum value), or by - a string literal (the enum name). - - Args: - field: Enum field descriptor. - value: String value. - - Returns: - Enum value number. - - Raises: - ValueError: If the enum value could not be parsed. - """ - enum_descriptor = field.enum_type - try: - number = int(value, 0) - except ValueError: - # Identifier. - enum_value = enum_descriptor.values_by_name.get(value, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value named %s.' % - (enum_descriptor.full_name, value)) - else: - # Numeric value. - if hasattr(field.file, 'syntax'): - # Attribute is checked for compatibility. - if field.file.syntax == 'proto3': - # Proto3 accept numeric unknown enums. - return number - enum_value = enum_descriptor.values_by_number.get(number, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value with number %d.' % - (enum_descriptor.full_name, number)) - return enum_value.number diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/timestamp_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/timestamp_pb2.py deleted file mode 100644 index 558d496941..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/timestamp_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/timestamp.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _TIMESTAMP._serialized_start=52 - _TIMESTAMP._serialized_end=95 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/type_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/type_pb2.py deleted file mode 100644 index 19903fb6b4..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/type_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/type.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == 
False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SYNTAX._serialized_start=1413 - _SYNTAX._serialized_end=1459 - _TYPE._serialized_start=113 - _TYPE._serialized_end=328 - _FIELD._serialized_start=331 - _FIELD._serialized_end=1056 - _FIELD_KIND._serialized_start=610 - _FIELD_KIND._serialized_end=938 - _FIELD_CARDINALITY._serialized_start=940 - _FIELD_CARDINALITY._serialized_end=1056 - _ENUM._serialized_start=1059 - _ENUM._serialized_end=1265 - _ENUMVALUE._serialized_start=1267 - _ENUMVALUE._serialized_end=1350 - _OPTION._serialized_start=1352 - _OPTION._serialized_end=1411 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/__init__.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_pb2.py deleted file mode 100644 index 66a5836c82..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_pb2.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/util/json_format.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 
\x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) - - DESCRIPTOR._options = None - _TESTBOOLMAP_BOOLMAPENTRY._options = None - _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' - _ENUMVALUE._serialized_start=1607 - _ENUMVALUE._serialized_end=1657 - _TESTFLAGSANDSTRINGS._serialized_start=62 - _TESTFLAGSANDSTRINGS._serialized_end=199 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 - _TESTBASE64BYTEARRAYS._serialized_start=201 - _TESTBASE64BYTEARRAYS._serialized_end=234 - _TESTJAVASCRIPTJSON._serialized_start=236 - _TESTJAVASCRIPTJSON._serialized_end=307 - _TESTJAVASCRIPTORDERJSON1._serialized_start=309 - _TESTJAVASCRIPTORDERJSON1._serialized_end=390 - _TESTJAVASCRIPTORDERJSON2._serialized_start=393 - _TESTJAVASCRIPTORDERJSON2._serialized_end=530 - _TESTLARGEINT._serialized_start=532 - _TESTLARGEINT._serialized_end=568 - _TESTNUMBERS._serialized_start=571 - _TESTNUMBERS._serialized_end=731 - _TESTNUMBERS_MYTYPE._serialized_start=691 - _TESTNUMBERS_MYTYPE._serialized_end=731 - _TESTCAMELCASE._serialized_start=733 - _TESTCAMELCASE._serialized_end=817 - _TESTBOOLMAP._serialized_start=819 - _TESTBOOLMAP._serialized_end=943 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 - _TESTRECURSION._serialized_start=945 - _TESTRECURSION._serialized_end=1024 - _TESTSTRINGMAP._serialized_start=1027 - _TESTSTRINGMAP._serialized_end=1161 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 - _TESTSTRINGSERIALIZER._serialized_start=1164 - _TESTSTRINGSERIALIZER._serialized_end=1360 - 
_TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 - _TESTMESSAGEWITHEXTENSION._serialized_start=1362 - _TESTMESSAGEWITHEXTENSION._serialized_end=1398 - _TESTEXTENSION._serialized_start=1400 - _TESTEXTENSION._serialized_end=1522 - _TESTDEFAULTENUMVALUE._serialized_start=1524 - _TESTDEFAULTENUMVALUE._serialized_end=1605 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py deleted file mode 100644 index 5498deafa9..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/util/json_format_proto3.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f 
\x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 \x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 
\x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 \x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' - _TESTMAP_BOOLMAPENTRY._options = None - _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' - 
_TESTMAP_INT32MAPENTRY._options = None - _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_INT64MAPENTRY._options = None - _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT32MAPENTRY._options = None - _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT64MAPENTRY._options = None - _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_STRINGMAPENTRY._options = None - _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_BOOLMAPENTRY._options = None - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT32MAPENTRY._options = None - _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT64MAPENTRY._options = None - _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_UINT32MAPENTRY._options = None - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_UINT64MAPENTRY._options = None - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_STRINGMAPENTRY._options = None - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_MAPMAPENTRY._options = None - _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTBOOLVALUE_BOOLMAPENTRY._options = None - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' - _ENUMTYPE._serialized_start=4849 - _ENUMTYPE._serialized_end=4877 - _MESSAGETYPE._serialized_start=277 - _MESSAGETYPE._serialized_end=305 - _TESTMESSAGE._serialized_start=308 - _TESTMESSAGE._serialized_end=968 - _TESTONEOF._serialized_start=971 - _TESTONEOF._serialized_end=1239 - _TESTMAP._serialized_start=1242 - _TESTMAP._serialized_end=1851 - _TESTMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTMAP_INT32MAPENTRY._serialized_start=1605 - _TESTMAP_INT32MAPENTRY._serialized_end=1652 - _TESTMAP_INT64MAPENTRY._serialized_start=1654 - _TESTMAP_INT64MAPENTRY._serialized_end=1701 - _TESTMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP._serialized_start=1854 - _TESTNESTEDMAP._serialized_end=2627 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 - _TESTSTRINGMAP._serialized_start=2629 - _TESTSTRINGMAP._serialized_end=2752 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 - _TESTWRAPPER._serialized_start=2755 - _TESTWRAPPER._serialized_end=3761 - _TESTTIMESTAMP._serialized_start=3763 - _TESTTIMESTAMP._serialized_end=3873 - 
_TESTDURATION._serialized_start=3875 - _TESTDURATION._serialized_end=3982 - _TESTFIELDMASK._serialized_start=3984 - _TESTFIELDMASK._serialized_end=4042 - _TESTSTRUCT._serialized_start=4044 - _TESTSTRUCT._serialized_end=4145 - _TESTANY._serialized_start=4147 - _TESTANY._serialized_end=4239 - _TESTVALUE._serialized_start=4241 - _TESTVALUE._serialized_end=4339 - _TESTLISTVALUE._serialized_start=4341 - _TESTLISTVALUE._serialized_end=4451 - _TESTBOOLVALUE._serialized_start=4454 - _TESTBOOLVALUE._serialized_end=4591 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 - _TESTCUSTOMJSONNAME._serialized_start=4593 - _TESTCUSTOMJSONNAME._serialized_end=4636 - _TESTEXTENSIONS._serialized_start=4638 - _TESTEXTENSIONS._serialized_end=4712 - _TESTENUMVALUE._serialized_start=4715 - _TESTENUMVALUE._serialized_end=4847 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/wrappers_pb2.py b/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/wrappers_pb2.py deleted file mode 100644 index e49eb4c15d..0000000000 --- a/server_addon/hiero/client/ayon_hiero/vendor/google/protobuf/wrappers_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/wrappers.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _DOUBLEVALUE._serialized_start=51 - _DOUBLEVALUE._serialized_end=79 - _FLOATVALUE._serialized_start=81 - _FLOATVALUE._serialized_end=108 - _INT64VALUE._serialized_start=110 - _INT64VALUE._serialized_end=137 - _UINT64VALUE._serialized_start=139 - _UINT64VALUE._serialized_end=167 - _INT32VALUE._serialized_start=169 - _INT32VALUE._serialized_end=196 - _UINT32VALUE._serialized_start=198 - _UINT32VALUE._serialized_end=226 - _BOOLVALUE._serialized_start=228 - _BOOLVALUE._serialized_end=254 - _STRINGVALUE._serialized_start=256 - 
_STRINGVALUE._serialized_end=284 - _BYTESVALUE._serialized_start=286 - _BYTESVALUE._serialized_end=313 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/hiero/client/ayon_hiero/version.py b/server_addon/hiero/client/ayon_hiero/version.py deleted file mode 100644 index 74ebfba8b0..0000000000 --- a/server_addon/hiero/client/ayon_hiero/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'hiero' version.""" -__version__ = "0.2.2" diff --git a/server_addon/hiero/package.py b/server_addon/hiero/package.py deleted file mode 100644 index eba3fb12f4..0000000000 --- a/server_addon/hiero/package.py +++ /dev/null @@ -1,9 +0,0 @@ -name = "hiero" -title = "Hiero" -version = "0.2.2" -client_dir = "ayon_hiero" - -ayon_required_addons = { - "core": ">0.3.2", -} -ayon_compatible_addons = {} diff --git a/server_addon/hiero/server/__init__.py b/server_addon/hiero/server/__init__.py deleted file mode 100644 index 3db78eafd7..0000000000 --- a/server_addon/hiero/server/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Type - -from ayon_server.addons import BaseServerAddon - -from .settings import HieroSettings, DEFAULT_VALUES - - -class HieroAddon(BaseServerAddon): - settings_model: Type[HieroSettings] = HieroSettings - - async def get_default_settings(self): - settings_model_cls = self.get_settings_model() - return settings_model_cls(**DEFAULT_VALUES) diff --git a/server_addon/hiero/server/settings/__init__.py b/server_addon/hiero/server/settings/__init__.py deleted file mode 100644 index 246c8203e9..0000000000 --- a/server_addon/hiero/server/settings/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .main import ( - HieroSettings, - DEFAULT_VALUES, -) - - -__all__ = ( - "HieroSettings", - "DEFAULT_VALUES", -) diff --git a/server_addon/hiero/server/settings/common.py b/server_addon/hiero/server/settings/common.py deleted file mode 100644 index 7b5e4390c5..0000000000 --- a/server_addon/hiero/server/settings/common.py +++ /dev/null @@ -1,97 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ( - ColorRGBA_float, - ColorRGB_uint8 -) - - -class Vector2d(BaseSettingsModel): - _layout = "compact" - - x: float = SettingsField(1.0, title="X") - y: float = SettingsField(1.0, title="Y") - - -class Vector3d(BaseSettingsModel): - _layout = "compact" - - x: float = SettingsField(1.0, title="X") - y: float = SettingsField(1.0, title="Y") - z: float = SettingsField(1.0, title="Z") - - -def formatable_knob_type_enum(): - return [ - {"value": "text", "label": "Text"}, - {"value": "number", "label": "Number"}, - {"value": "decimal_number", "label": "Decimal number"}, - {"value": "2d_vector", "label": "2D vector"}, - # "3D vector" - ] - - -class Formatable(BaseSettingsModel): - _layout = "compact" - - template: str = SettingsField( - "", - placeholder="""{{key}} or {{key}};{{key}}""", - title="Template" - ) - to_type: str = SettingsField( - "Text", - title="To Knob type", - enum_resolver=formatable_knob_type_enum, - ) - - -knob_types_enum = [ - {"value": "text", "label": "Text"}, - {"value": "formatable", "label": "Formate from template"}, - {"value": "color_gui", "label": "Color GUI"}, - {"value": "boolean", "label": "Boolean"}, - {"value": "number", "label": "Number"}, - {"value": "decimal_number", "label": "Decimal number"}, - {"value": "vector_2d", "label": "2D vector"}, - {"value": "vector_3d", "label": "3D vector"}, - {"value": "color", "label": "Color"} -] - - -class 
KnobModel(BaseSettingsModel): - _layout = "expanded" - - type: str = SettingsField( - title="Type", - description="Switch between different knob types", - enum_resolver=lambda: knob_types_enum, - conditionalEnum=True - ) - name: str = SettingsField( - title="Name", - placeholder="Name" - ) - text: str = SettingsField("", title="Value") - color_gui: ColorRGB_uint8 = SettingsField( - (0, 0, 255), - title="RGB Uint8", - ) - boolean: bool = SettingsField(False, title="Value") - number: int = SettingsField(0, title="Value") - decimal_number: float = SettingsField(0.0, title="Value") - vector_2d: Vector2d = SettingsField( - default_factory=Vector2d, - title="Value" - ) - vector_3d: Vector3d = SettingsField( - default_factory=Vector3d, - title="Value" - ) - color: ColorRGBA_float = SettingsField( - (0.0, 0.0, 1.0, 1.0), - title="RGBA Float" - ) - formatable: Formatable = SettingsField( - default_factory=Formatable, - title="Value" - ) diff --git a/server_addon/hiero/server/settings/create_plugins.py b/server_addon/hiero/server/settings/create_plugins.py deleted file mode 100644 index 80e0b67182..0000000000 --- a/server_addon/hiero/server/settings/create_plugins.py +++ /dev/null @@ -1,96 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class CreateShotClipModels(BaseSettingsModel): - hierarchy: str = SettingsField( - "{folder}/{sequence}", - title="Shot parent hierarchy", - section="Shot Hierarchy And Rename Settings" - ) - clipRename: bool = SettingsField( - True, - title="Rename clips" - ) - clipName: str = SettingsField( - "{track}{sequence}{shot}", - title="Clip name template" - ) - countFrom: int = SettingsField( - 10, - title="Count sequence from" - ) - countSteps: int = SettingsField( - 10, - title="Stepping number" - ) - - folder: str = SettingsField( - "shots", - title="{folder}", - section="Shot Template Keywords" - ) - episode: str = SettingsField( - "ep01", - title="{episode}" - ) - sequence: str = SettingsField( - "sq01", - title="{sequence}" - ) - track: str = SettingsField( - "{_track_}", - title="{track}" - ) - shot: str = SettingsField( - "sh###", - title="{shot}" - ) - - vSyncOn: bool = SettingsField( - False, - title="Enable Vertical Sync", - section="Vertical Synchronization Of Attributes" - ) - - workfileFrameStart: int = SettingsField( - 1001, - title="Workfiles Start Frame", - section="Shot Attributes" - ) - handleStart: int = SettingsField( - 10, - title="Handle start (head)" - ) - handleEnd: int = SettingsField( - 10, - title="Handle end (tail)" - ) - - -class CreatorPluginsSettings(BaseSettingsModel): - CreateShotClip: CreateShotClipModels = SettingsField( - default_factory=CreateShotClipModels, - title="Create Shot Clip" - ) - - -DEFAULT_CREATE_SETTINGS = { - "create": { - "CreateShotClip": { - "hierarchy": "{folder}/{sequence}", - "clipRename": True, - "clipName": "{track}{sequence}{shot}", - "countFrom": 10, - "countSteps": 10, - "folder": "shots", - "episode": "ep01", - "sequence": "sq01", - "track": "{_track_}", - "shot": "sh###", - "vSyncOn": False, - "workfileFrameStart": 1001, - "handleStart": 10, - "handleEnd": 10 - } - } -} diff --git a/server_addon/hiero/server/settings/filters.py b/server_addon/hiero/server/settings/filters.py deleted file mode 100644 index 095d30a004..0000000000 --- a/server_addon/hiero/server/settings/filters.py +++ /dev/null @@ -1,25 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, -) - - -class 
PublishGUIFilterItemModel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: bool = SettingsField(True, title="Active") - - -class PublishGUIFiltersModel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: list[PublishGUIFilterItemModel] = SettingsField( - default_factory=list - ) - - @validator("value") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value diff --git a/server_addon/hiero/server/settings/imageio.py b/server_addon/hiero/server/settings/imageio.py deleted file mode 100644 index 83ae7024f9..0000000000 --- a/server_addon/hiero/server/settings/imageio.py +++ /dev/null @@ -1,185 +0,0 @@ -from pydantic import validator - -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, -) - - -def ocio_configs_switcher_enum(): - return [ - {"value": "nuke-default", "label": "nuke-default"}, - {"value": "spi-vfx", "label": "spi-vfx"}, - {"value": "spi-anim", "label": "spi-anim"}, - {"value": "aces_0.1.1", "label": "aces_0.1.1"}, - {"value": "aces_0.7.1", "label": "aces_0.7.1"}, - {"value": "aces_1.0.1", "label": "aces_1.0.1"}, - {"value": "aces_1.0.3", "label": "aces_1.0.3"}, - {"value": "aces_1.1", "label": "aces_1.1"}, - {"value": "aces_1.2", "label": "aces_1.2"}, - {"value": "aces_1.3", "label": "aces_1.3"}, - {"value": "custom", "label": "custom"} - ] - - -class WorkfileColorspaceSettings(BaseSettingsModel): - """Hiero workfile colorspace preset. """ - """# TODO: enhance settings with host api: - we need to add mapping to resolve properly keys. - Hiero is excpecting camel case key names, - but for better code consistency we are using snake_case: - - ocio_config = ocioConfigName - working_space_name = workingSpace - int_16_name = sixteenBitLut - int_8_name = eightBitLut - float_name = floatLut - log_name = logLut - viewer_name = viewerLut - thumbnail_name = thumbnailLut - """ - - ocioConfigName: str = SettingsField( - title="OpenColorIO Config", - description="Switch between OCIO configs", - enum_resolver=ocio_configs_switcher_enum, - conditionalEnum=True - ) - workingSpace: str = SettingsField( - title="Working Space" - ) - viewerLut: str = SettingsField( - title="Viewer" - ) - eightBitLut: str = SettingsField( - title="8-bit files" - ) - sixteenBitLut: str = SettingsField( - title="16-bit files" - ) - logLut: str = SettingsField( - title="Log files" - ) - floatLut: str = SettingsField( - title="Float files" - ) - thumbnailLut: str = SettingsField( - title="Thumnails" - ) - monitorOutLut: str = SettingsField( - title="Monitor" - ) - - -class ClipColorspaceRulesItems(BaseSettingsModel): - _layout = "expanded" - - regex: str = SettingsField("", title="Regex expression") - colorspace: str = SettingsField("", title="Colorspace") - - -class RegexInputsModel(BaseSettingsModel): - inputs: list[ClipColorspaceRulesItems] = SettingsField( - default_factory=list, - title="Inputs" - ) - - -class ImageIOConfigModel(BaseSettingsModel): - """[DEPRECATED] Addon OCIO config settings. Please set the OCIO config - path in the Core addon profiles here - (ayon+settings://core/imageio/ocio_config_profiles). - """ - - override_global_config: bool = SettingsField( - False, - title="Override global OCIO config", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." 
- ), - ) - filepath: list[str] = SettingsField( - default_factory=list, - title="Config path", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - - -class ImageIOFileRuleModel(BaseSettingsModel): - name: str = SettingsField("", title="Rule name") - pattern: str = SettingsField("", title="Regex pattern") - colorspace: str = SettingsField("", title="Colorspace name") - ext: str = SettingsField("", title="File extension") - - -class ImageIOFileRulesModel(BaseSettingsModel): - activate_host_rules: bool = SettingsField(False) - rules: list[ImageIOFileRuleModel] = SettingsField( - default_factory=list, - title="Rules" - ) - - @validator("rules") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value - - -class ImageIOSettings(BaseSettingsModel): - """Hiero color management project settings. """ - _isGroup: bool = True - activate_host_color_management: bool = SettingsField( - True, title="Enable Color Management" - ) - ocio_config: ImageIOConfigModel = SettingsField( - default_factory=ImageIOConfigModel, - title="OCIO config" - ) - file_rules: ImageIOFileRulesModel = SettingsField( - default_factory=ImageIOFileRulesModel, - title="File Rules" - ) - workfile: WorkfileColorspaceSettings = SettingsField( - default_factory=WorkfileColorspaceSettings, - title="Workfile" - ) - """# TODO: enhance settings with host api: - - old settings are using `regexInputs` key but we - need to rename to `regex_inputs` - - no need for `inputs` middle part. It can stay - directly on `regex_inputs` - """ - regexInputs: RegexInputsModel = SettingsField( - default_factory=RegexInputsModel, - title="Assign colorspace to clips via rules" - ) - - -DEFAULT_IMAGEIO_SETTINGS = { - "workfile": { - "ocioConfigName": "aces_1.2", - "workingSpace": "role_scene_linear", - "viewerLut": "ACES/sRGB", - "eightBitLut": "role_matte_paint", - "sixteenBitLut": "role_texture_paint", - "logLut": "role_compositing_log", - "floatLut": "role_scene_linear", - "thumbnailLut": "ACES/sRGB", - "monitorOutLut": "ACES/sRGB" - }, - "regexInputs": { - "inputs": [ - { - "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", - "colorspace": "sRGB" - } - ] - } -} diff --git a/server_addon/hiero/server/settings/loader_plugins.py b/server_addon/hiero/server/settings/loader_plugins.py deleted file mode 100644 index 682f9fd2d9..0000000000 --- a/server_addon/hiero/server/settings/loader_plugins.py +++ /dev/null @@ -1,37 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class LoadClipModel(BaseSettingsModel): - enabled: bool = SettingsField( - True, - title="Enabled" - ) - product_types: list[str] = SettingsField( - default_factory=list, - title="Product types" - ) - clip_name_template: str = SettingsField( - title="Clip name template" - ) - - -class LoaderPluginsModel(BaseSettingsModel): - LoadClip: LoadClipModel = SettingsField( - default_factory=LoadClipModel, - title="Load Clip" - ) - - -DEFAULT_LOADER_PLUGINS_SETTINGS = { - "LoadClip": { - "enabled": True, - "product_types": [ - "render2d", - "source", - "plate", - "render", - "review" - ], - "clip_name_template": "{folder[name]}_{product[name]}_{representation}" - } -} diff --git a/server_addon/hiero/server/settings/main.py b/server_addon/hiero/server/settings/main.py deleted file mode 100644 index 378af6a539..0000000000 --- a/server_addon/hiero/server/settings/main.py +++ /dev/null @@ -1,62 +0,0 @@ -from ayon_server.settings 
import BaseSettingsModel, SettingsField - -from .imageio import ( - ImageIOSettings, - DEFAULT_IMAGEIO_SETTINGS -) -from .create_plugins import ( - CreatorPluginsSettings, - DEFAULT_CREATE_SETTINGS -) -from .loader_plugins import ( - LoaderPluginsModel, - DEFAULT_LOADER_PLUGINS_SETTINGS -) -from .publish_plugins import ( - PublishPluginsModel, - DEFAULT_PUBLISH_PLUGIN_SETTINGS -) -from .scriptsmenu import ( - ScriptsmenuSettings, - DEFAULT_SCRIPTSMENU_SETTINGS -) -from .filters import PublishGUIFilterItemModel - - -class HieroSettings(BaseSettingsModel): - """Nuke addon settings.""" - - imageio: ImageIOSettings = SettingsField( - default_factory=ImageIOSettings, - title="Color Management (imageio)", - ) - - create: CreatorPluginsSettings = SettingsField( - default_factory=CreatorPluginsSettings, - title="Creator Plugins", - ) - load: LoaderPluginsModel = SettingsField( - default_factory=LoaderPluginsModel, - title="Loader plugins" - ) - publish: PublishPluginsModel = SettingsField( - default_factory=PublishPluginsModel, - title="Publish plugins" - ) - scriptsmenu: ScriptsmenuSettings = SettingsField( - default_factory=ScriptsmenuSettings, - title="Scripts Menu Definition", - ) - filters: list[PublishGUIFilterItemModel] = SettingsField( - default_factory=list - ) - - -DEFAULT_VALUES = { - "imageio": DEFAULT_IMAGEIO_SETTINGS, - "create": DEFAULT_CREATE_SETTINGS, - "load": DEFAULT_LOADER_PLUGINS_SETTINGS, - "publish": DEFAULT_PUBLISH_PLUGIN_SETTINGS, - "scriptsmenu": DEFAULT_SCRIPTSMENU_SETTINGS, - "filters": [], -} diff --git a/server_addon/hiero/server/settings/publish_plugins.py b/server_addon/hiero/server/settings/publish_plugins.py deleted file mode 100644 index 0e746d1cc1..0000000000 --- a/server_addon/hiero/server/settings/publish_plugins.py +++ /dev/null @@ -1,56 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, - normalize_name, -) - - -class CollectClipEffectsDefModel(BaseSettingsModel): - _layout = "expanded" - name: str = SettingsField("", title="Name") - effect_classes: list[str] = SettingsField( - default_factory=list, title="Effect Classes" - ) - - @validator("name") - def validate_name(cls, value): - """Ensure name does not contain weird characters""" - return normalize_name(value) - - -class CollectClipEffectsTracksModel(BaseSettingsModel): - _layout = "expanded" - name: str = SettingsField("", title="Name") - track_names: list[str] = SettingsField("", title="Track Names") - - -class CollectClipEffectsModel(BaseSettingsModel): - effect_categories: list[CollectClipEffectsDefModel] = SettingsField( - default_factory=list, title="Effect Categories" - ) - - effect_tracks: list[CollectClipEffectsTracksModel] = SettingsField( - default_factory=list, title="Effect Tracks" - ) - - @validator("effect_categories") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value - - -class PublishPluginsModel(BaseSettingsModel): - CollectClipEffects: CollectClipEffectsModel = SettingsField( - default_factory=CollectClipEffectsModel, - title="Collect Clip Effects" - ) - - -DEFAULT_PUBLISH_PLUGIN_SETTINGS = { - "CollectClipEffectsModel": { - "effect_categories": [], - "effect_tracks": [] - } -} diff --git a/server_addon/hiero/server/settings/scriptsmenu.py b/server_addon/hiero/server/settings/scriptsmenu.py deleted file mode 100644 index a627da9643..0000000000 --- a/server_addon/hiero/server/settings/scriptsmenu.py +++ /dev/null @@ -1,40 +0,0 @@ -from ayon_server.settings import 
BaseSettingsModel, SettingsField - - -class ScriptsmenuSubmodel(BaseSettingsModel): - """Item Definition""" - _isGroup = True - - type: str = SettingsField(title="Type") - command: str = SettingsField(title="Command") - sourcetype: str = SettingsField(title="Source Type") - title: str = SettingsField(title="Title") - tooltip: str = SettingsField(title="Tooltip") - - -class ScriptsmenuSettings(BaseSettingsModel): - """Nuke script menu project settings.""" - _isGroup = True - - """# TODO: enhance settings with host api: - - in api rename key `name` to `menu_name` - """ - name: str = SettingsField(title="Menu name") - definition: list[ScriptsmenuSubmodel] = SettingsField( - default_factory=list, - title="Definition", - description="Scriptmenu Items Definition") - - -DEFAULT_SCRIPTSMENU_SETTINGS = { - "name": "Custom Tools", - "definition": [ - { - "type": "action", - "sourcetype": "python", - "title": "Ayon Hiero Docs", - "command": "import webbrowser;webbrowser.open(url='https://ayon.ynput.io/docs/addon_hiero_artist')", # noqa - "tooltip": "Open the Ayon Hiero user doc page" - } - ] -} diff --git a/server_addon/jobqueue/client/ayon_jobqueue/__init__.py b/server_addon/jobqueue/client/ayon_jobqueue/__init__.py deleted file mode 100644 index 041782dd29..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .version import __version__ -from .addon import JobQueueAddon - - -__all__ = ( - "__version__", - - "JobQueueAddon", -) diff --git a/server_addon/jobqueue/client/ayon_jobqueue/addon.py b/server_addon/jobqueue/client/ayon_jobqueue/addon.py deleted file mode 100644 index ffd32feb89..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/addon.py +++ /dev/null @@ -1,234 +0,0 @@ -"""Job queue AYON addon was created for remote execution of commands. - -## Why is needed -Primarily created for hosts which are not easilly controlled from command line -or in headless mode and is easier to keep one process of host running listening -for jobs to do. - -### Example -One of examples is TVPaint which does not have headless mode, can run only one -process at one time and it's impossible to know what should be executed inside -TVPaint before we know all data about the file that should be processed. - -## Idea -Idea is that there is a server, workers and workstation/s which need to process -something on a worker. - -Workers and workstation/s must have access to server through adress to it's -running instance. Workers use WebSockets and workstations are using HTTP calls. -Also both of them must have access to job queue root which is set in -settings. Root is used as temp where files needed for job can be stored before -sending the job or where result files are stored when job is done. - -Server's address must be set in settings when is running so workers and -workstations know where to send or receive jobs. - -## Command line commands -### start_server -- start server which is handles jobs -- it is possible to specify port and host address (default is localhost:8079) - -### start_worker -- start worker which will process jobs -- has required possitional argument which is application name from AYON - settings e.g. 
'tvpaint/11-5' ('tvpaint' is group '11-5' is variant) -- it is possible to specify server url but url from settings is used when not - passed (this is added mainly for developing purposes) -""" - -import json -import copy -import platform -from urllib.parse import urlsplit, urlunsplit - -import requests - -from ayon_core.addon import AYONAddon, click_wrap -from ayon_core.settings import get_studio_settings - -from .version import __version__ - - -class JobQueueAddon(AYONAddon): - name = "jobqueue" - version = __version__ - - def initialize(self, studio_settings): - addon_settings = studio_settings.get(self.name) or {} - server_url = addon_settings.get("server_url") or "" - - self._server_url = self.url_conversion(server_url) - jobs_root_mapping = self._roots_mapping_conversion( - addon_settings.get("jobs_root") - ) - - self._jobs_root_mapping = jobs_root_mapping - - @classmethod - def _root_conversion(cls, root_path): - """Make sure root path does not end with slash.""" - # Return empty string if path is invalid - if not root_path: - return "" - - # Remove all slashes - while root_path.endswith("/") or root_path.endswith("\\"): - root_path = root_path[:-1] - return root_path - - @classmethod - def _roots_mapping_conversion(cls, roots_mapping): - roots_mapping = roots_mapping or {} - for platform_name in ("windows", "linux", "darwin"): - roots_mapping[platform_name] = cls._root_conversion( - roots_mapping.get(platform_name) - ) - return roots_mapping - - @staticmethod - def url_conversion(url, ws=False): - if not url: - return url - - url_parts = list(urlsplit(url)) - scheme = url_parts[0] - if not scheme: - if ws: - url = "ws://{}".format(url) - else: - url = "http://{}".format(url) - url_parts = list(urlsplit(url)) - - elif ws: - if scheme not in ("ws", "wss"): - if scheme == "https": - url_parts[0] = "wss" - else: - url_parts[0] = "ws" - - elif scheme not in ("http", "https"): - if scheme == "wss": - url_parts[0] = "https" - else: - url_parts[0] = "http" - - return urlunsplit(url_parts) - - def get_jobs_root_mapping(self): - return copy.deepcopy(self._jobs_root_mapping) - - def get_jobs_root(self): - return self._jobs_root_mapping.get(platform.system().lower()) - - @classmethod - def get_jobs_root_from_settings(cls): - studio_settings = get_studio_settings() - jobs_root_mapping = studio_settings.get(cls.name, {}).get("jobs_root") - converted_mapping = cls._roots_mapping_conversion(jobs_root_mapping) - - return converted_mapping[platform.system().lower()] - - @property - def server_url(self): - return self._server_url - - def send_job(self, host_name, job_data): - job_data = job_data or {} - job_data["host_name"] = host_name - api_path = "{}/api/jobs".format(self._server_url) - post_request = requests.post(api_path, data=json.dumps(job_data)) - return str(post_request.content.decode()) - - def get_job_status(self, job_id): - api_path = "{}/api/jobs/{}".format(self._server_url, job_id) - return requests.get(api_path).json() - - def cli(self, click_group): - click_group.add_command(cli_main.to_click_obj()) - - @classmethod - def get_server_url_from_settings(cls): - studio_settings = get_studio_settings() - return cls.url_conversion( - studio_settings - .get(cls.name, {}) - .get("server_url") - ) - - @classmethod - def start_server(cls, port=None, host=None): - from .job_server import main - - return main(port, host) - - @classmethod - def start_worker(cls, app_name, server_url=None): - from ayon_applications import ApplicationManager - - if not server_url: - server_url = 
cls.get_server_url_from_settings() - - if not server_url: - raise ValueError("Server url is not set.") - - http_server_url = cls.url_conversion(server_url) - - # Validate url - requests.get(http_server_url) - - ws_server_url = cls.url_conversion(server_url) + "/ws" - - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app is None: - raise ValueError( - "Didn't find application \"{}\" in settings.".format(app_name) - ) - - if app.host_name == "tvpaint": - return cls._start_tvpaint_worker(app, ws_server_url) - raise ValueError("Unknown host \"{}\"".format(app.host_name)) - - @classmethod - def _start_tvpaint_worker(cls, app, server_url): - from ayon_tvpaint.worker import main - - executable = app.find_executable() - if not executable: - raise ValueError(( - "Executable for app \"{}\" is not set" - " or accessible on this workstation." - ).format(app.full_name)) - - return main(str(executable), server_url) - - -@click_wrap.group( - JobQueueAddon.name, - help="Application job server. Can be used as render farm." -) -def cli_main(): - pass - - -@cli_main.command( - "start_server", - help="Start server handling workers and their jobs." -) -@click_wrap.option("--port", help="Server port") -@click_wrap.option("--host", help="Server host (ip address)") -def cli_start_server(port, host): - JobQueueAddon.start_server(port, host) - - -@cli_main.command( - "start_worker", help=( - "Start a worker for a specific application. (e.g. \"tvpaint/11.5\")" - ) -) -@click_wrap.argument("app_name") -@click_wrap.option( - "--server_url", - help="Server url which handle workers and jobs.") -def cli_start_worker(app_name, server_url): - JobQueueAddon.start_worker(app_name, server_url) diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/__init__.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/__init__.py deleted file mode 100644 index c73d830257..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .server import WebServerManager -from .utils import main - - -__all__ = ( - "WebServerManager", - "main" -) diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/job_queue_route.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/job_queue_route.py deleted file mode 100644 index 8929e64dc5..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/job_queue_route.py +++ /dev/null @@ -1,62 +0,0 @@ -import json - -from aiohttp.web_response import Response - - -class JobQueueResource: - def __init__(self, job_queue, server_manager): - self.server_manager = server_manager - - self._prefix = "/api" - - self._job_queue = job_queue - - self.endpoint_defs = ( - ("POST", "/jobs", self.post_job), - ("GET", "/jobs", self.get_jobs), - ("GET", "/jobs/{job_id}", self.get_job) - ) - - self.register() - - def register(self): - for methods, url, callback in self.endpoint_defs: - final_url = self._prefix + url - self.server_manager.add_route( - methods, final_url, callback - ) - - async def get_jobs(self, request): - jobs_data = [] - for job in self._job_queue.get_jobs(): - jobs_data.append(job.status()) - return Response(status=200, body=self.encode(jobs_data)) - - async def post_job(self, request): - data = await request.json() - host_name = data.get("host_name") - if not host_name: - return Response( - status=400, message="Key \"host_name\" not filled." 
- ) - - job = self._job_queue.create_job(host_name, data) - return Response(status=201, text=job.id) - - async def get_job(self, request): - job_id = request.match_info["job_id"] - content = self._job_queue.get_job_status(job_id) - if content is None: - content = {} - return Response( - status=200, - body=self.encode(content), - content_type="application/json" - ) - - @classmethod - def encode(cls, data): - return json.dumps( - data, - indent=4 - ).encode("utf-8") diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/jobs.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/jobs.py deleted file mode 100644 index 0fc3c381d4..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/jobs.py +++ /dev/null @@ -1,240 +0,0 @@ -import datetime -import collections -from uuid import uuid4 - - -class Job: - """Job related to specific host name. - - Data must contain everything needed to finish the job. - """ - # Remove done jobs each n days to clear memory - keep_in_memory_days = 3 - - def __init__(self, host_name, data, job_id=None, created_time=None): - if job_id is None: - job_id = str(uuid4()) - self._id = job_id - if created_time is None: - created_time = datetime.datetime.now() - self._created_time = created_time - self._started_time = None - self._done_time = None - self.host_name = host_name - self.data = data - self._result_data = None - - self._started = False - self._done = False - self._errored = False - self._message = None - self._deleted = False - - self._worker = None - - def keep_in_memory(self): - if self._done_time is None: - return True - - now = datetime.datetime.now() - delta = now - self._done_time - return delta.days < self.keep_in_memory_days - - @property - def id(self): - return self._id - - @property - def done(self): - return self._done - - def reset(self): - self._started = False - self._started_time = None - self._done = False - self._done_time = None - self._errored = False - self._message = None - - self._worker = None - - @property - def started(self): - return self._started - - @property - def deleted(self): - return self._deleted - - def set_deleted(self): - self._deleted = True - self.set_worker(None) - - def set_worker(self, worker): - if worker is self._worker: - return - - if self._worker is not None: - self._worker.set_current_job(None) - - self._worker = worker - if worker is not None: - worker.set_current_job(self) - - def set_started(self): - self._started_time = datetime.datetime.now() - self._started = True - - def set_done(self, success=True, message=None, data=None): - self._done = True - self._done_time = datetime.datetime.now() - self._errored = not success - self._message = message - self._result_data = data - if self._worker is not None: - self._worker.set_current_job(None) - - def status(self): - worker_id = None - if self._worker is not None: - worker_id = self._worker.id - output = { - "id": self.id, - "worker_id": worker_id, - "done": self._done - } - output["message"] = self._message or None - - state = "waiting" - if self._deleted: - state = "deleted" - elif self._errored: - state = "error" - elif self._done: - state = "done" - elif self._started: - state = "started" - - output["result"] = self._result_data - - output["state"] = state - - return output - - -class JobQueue: - """Queue holds jobs that should be done and workers that can do them. - - Also asign jobs to a worker. 
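# Editorial sketch (not part of the deleted addon code): a minimal example of
# how a workstation could use the HTTP endpoints registered by
# JobQueueResource above (POST /api/jobs, GET /api/jobs/{job_id}), mirroring
# what JobQueueAddon.send_job() and get_job_status() do. The server URL and
# the payload contents are illustrative assumptions.
import json
import time

import requests

SERVER_URL = "http://localhost:8079"  # default host/port used by start_server


def submit_and_wait(host_name, job_data, poll_interval=2.0):
    # POST /api/jobs answers with the new job id as plain text (status 201).
    job_data = dict(job_data)
    job_data["host_name"] = host_name
    response = requests.post(
        "{}/api/jobs".format(SERVER_URL), data=json.dumps(job_data)
    )
    job_id = response.content.decode()

    # GET /api/jobs/{job_id} returns the dict built by Job.status():
    # "state" is one of "waiting", "started", "done", "error" or "deleted".
    while True:
        status = requests.get(
            "{}/api/jobs/{}".format(SERVER_URL, job_id)
        ).json()
        if status.get("state") in ("done", "error", "deleted"):
            return status
        time.sleep(poll_interval)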
- """ - old_jobs_check_minutes_interval = 30 - - def __init__(self): - self._last_old_jobs_check = datetime.datetime.now() - self._jobs_by_id = {} - self._job_queue_by_host_name = collections.defaultdict( - collections.deque - ) - self._workers_by_id = {} - self._workers_by_host_name = collections.defaultdict(list) - - def workers(self): - """All currently registered workers.""" - return self._workers_by_id.values() - - def add_worker(self, worker): - host_name = worker.host_name - print("Added new worker for \"{}\"".format(host_name)) - self._workers_by_id[worker.id] = worker - self._workers_by_host_name[host_name].append(worker) - - def get_worker(self, worker_id): - return self._workers_by_id.get(worker_id) - - def remove_worker(self, worker): - # Look if worker had assigned job to do - job = worker.current_job - if job is not None and not job.done: - # Reset job - job.set_worker(None) - job.reset() - # Add job back to queue - self._job_queue_by_host_name[job.host_name].appendleft(job) - - # Remove worker from registered workers - self._workers_by_id.pop(worker.id, None) - host_name = worker.host_name - if worker in self._workers_by_host_name[host_name]: - self._workers_by_host_name[host_name].remove(worker) - - print("Removed worker for \"{}\"".format(host_name)) - - def assign_jobs(self): - """Try to assign job for each idle worker. - - Error all jobs without needed worker. - """ - available_host_names = set() - for worker in self._workers_by_id.values(): - host_name = worker.host_name - available_host_names.add(host_name) - if worker.is_idle(): - jobs = self._job_queue_by_host_name[host_name] - while jobs: - job = jobs.popleft() - if not job.deleted: - worker.set_current_job(job) - break - - for host_name in tuple(self._job_queue_by_host_name.keys()): - if host_name in available_host_names: - continue - - jobs_deque = self._job_queue_by_host_name[host_name] - message = ("Not available workers for \"{}\"").format(host_name) - while jobs_deque: - job = jobs_deque.popleft() - if not job.deleted: - job.set_done(False, message) - self._remove_old_jobs() - - def get_jobs(self): - return self._jobs_by_id.values() - - def get_job(self, job_id): - """Job by it's id.""" - return self._jobs_by_id.get(job_id) - - def create_job(self, host_name, job_data): - """Create new job from passed data and add it to queue.""" - job = Job(host_name, job_data) - self._jobs_by_id[job.id] = job - self._job_queue_by_host_name[host_name].append(job) - return job - - def _remove_old_jobs(self): - """Once in specific time look if should remove old finished jobs.""" - delta = datetime.datetime.now() - self._last_old_jobs_check - if delta.seconds < self.old_jobs_check_minutes_interval: - return - - for job_id in tuple(self._jobs_by_id.keys()): - job = self._jobs_by_id[job_id] - if not job.keep_in_memory(): - self._jobs_by_id.pop(job_id) - - def remove_job(self, job_id): - """Delete job and eventually stop it.""" - job = self._jobs_by_id.get(job_id) - if job is None: - return - - job.set_deleted() - self._jobs_by_id.pop(job.id) - - def get_job_status(self, job_id): - """Job's status based on id.""" - job = self._jobs_by_id.get(job_id) - if job is None: - return {} - return job.status() diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/server.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/server.py deleted file mode 100644 index cc0968b6b6..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/server.py +++ /dev/null @@ -1,154 +0,0 @@ -import threading -import asyncio 
-import logging - -from aiohttp import web - -from .jobs import JobQueue -from .job_queue_route import JobQueueResource -from .workers_rpc_route import WorkerRpc - -log = logging.getLogger(__name__) - - -class WebServerManager: - """Manger that care about web server thread.""" - def __init__(self, port, host, loop=None): - self.port = port - self.host = host - self.app = web.Application() - if loop is None: - loop = asyncio.new_event_loop() - - # add route with multiple methods for single "external app" - self.webserver_thread = WebServerThread(self, loop) - - @property - def url(self): - return "http://{}:{}".format(self.host, self.port) - - def add_route(self, *args, **kwargs): - self.app.router.add_route(*args, **kwargs) - - def add_static(self, *args, **kwargs): - self.app.router.add_static(*args, **kwargs) - - def start_server(self): - if self.webserver_thread and not self.webserver_thread.is_alive(): - self.webserver_thread.start() - - def stop_server(self): - if not self.is_running: - return - - try: - log.debug("Stopping Web server") - self.webserver_thread.stop() - - except Exception as exc: - print("Errored", str(exc)) - log.warning( - "Error has happened during Killing Web server", - exc_info=True - ) - - @property - def is_running(self): - if self.webserver_thread is not None: - return self.webserver_thread.is_running - return False - - -class WebServerThread(threading.Thread): - """ Listener for requests in thread.""" - def __init__(self, manager, loop): - super(WebServerThread, self).__init__() - - self._is_running = False - self._stopped = False - self.manager = manager - self.loop = loop - self.runner = None - self.site = None - - job_queue = JobQueue() - self.job_queue_route = JobQueueResource(job_queue, manager) - self.workers_route = WorkerRpc(job_queue, manager, loop=loop) - - @property - def port(self): - return self.manager.port - - @property - def host(self): - return self.manager.host - - @property - def stopped(self): - return self._stopped - - @property - def is_running(self): - return self._is_running - - def run(self): - self._is_running = True - - try: - log.info("Starting WebServer server") - asyncio.set_event_loop(self.loop) - self.loop.run_until_complete(self.start_server()) - - asyncio.ensure_future(self.check_shutdown(), loop=self.loop) - self.loop.run_forever() - - except Exception: - log.warning( - "Web Server service has failed", exc_info=True - ) - finally: - self.loop.close() - - self._is_running = False - log.info("Web server stopped") - - async def start_server(self): - """ Starts runner and TCPsite """ - self.runner = web.AppRunner(self.manager.app) - await self.runner.setup() - self.site = web.TCPSite(self.runner, self.host, self.port) - await self.site.start() - - def stop(self): - """Sets _stopped flag to True, 'check_shutdown' shuts server down""" - self._stopped = True - - async def check_shutdown(self): - """ Future that is running and checks if server should be running - periodically. 
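# Editorial sketch (not part of the deleted addon code): the bare aiohttp
# pattern that WebServerManager/WebServerThread wrap - an AppRunner and a
# TCPSite running on a private event loop inside a daemon thread, so the
# caller's thread is not blocked. Host, port and the /ping route are
# illustrative assumptions.
import asyncio
import threading

from aiohttp import web


async def _ping(request):
    return web.Response(text="ok")


def serve_in_thread(host="localhost", port=8079):
    app = web.Application()
    app.router.add_route("GET", "/ping", _ping)

    loop = asyncio.new_event_loop()

    def _run():
        asyncio.set_event_loop(loop)
        runner = web.AppRunner(app)
        loop.run_until_complete(runner.setup())
        site = web.TCPSite(runner, host, port)
        loop.run_until_complete(site.start())
        # Runs until loop.stop() is scheduled from another thread with
        # loop.call_soon_threadsafe(loop.stop).
        loop.run_forever()

    thread = threading.Thread(target=_run, daemon=True)
    thread.start()
    return loop, thread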
- """ - while not self._stopped: - await asyncio.sleep(0.5) - - print("Starting shutdown") - if self.workers_route: - await self.workers_route.stop() - - print("Stopping site") - await self.site.stop() - print("Site stopped") - await self.runner.cleanup() - - print("Runner stopped") - tasks = [ - task - for task in asyncio.all_tasks() - if task is not asyncio.current_task() - ] - list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks - results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') - await self.loop.shutdown_asyncgens() - # to really make sure everything else has time to stop - await asyncio.sleep(0.07) - self.loop.stop() diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/utils.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/utils.py deleted file mode 100644 index 127ca5f090..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/utils.py +++ /dev/null @@ -1,51 +0,0 @@ -import sys -import signal -import time -import socket - -from .server import WebServerManager - - -class SharedObjects: - stopped = False - - @classmethod - def stop(cls): - cls.stopped = True - - -def main(port=None, host=None): - def signal_handler(sig, frame): - print("Signal to kill process received. Termination starts.") - SharedObjects.stop() - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - port = int(port or 8079) - host = str(host or "localhost") - - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con: - result_of_check = con.connect_ex((host, port)) - - if result_of_check == 0: - print(( - "Server {}:{} is already running or address is occupied." - ).format(host, port)) - return 1 - - print("Running server {}:{}".format(host, port)) - manager = WebServerManager(port, host) - manager.start_server() - - stopped = False - while manager.is_running: - if not stopped and SharedObjects.stopped: - stopped = True - manager.stop_server() - time.sleep(0.1) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers.py deleted file mode 100644 index 28ca649c03..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers.py +++ /dev/null @@ -1,122 +0,0 @@ -import asyncio -from uuid import uuid4 -from aiohttp import WSCloseCode -from aiohttp_json_rpc.protocol import encode_request - - -class WorkerState: - IDLE = object() - JOB_ASSIGNED = object() - JOB_SENT = object() - - -class Worker: - """Worker that can handle jobs of specific host.""" - def __init__(self, host_name, http_request): - self._id = None - self.host_name = host_name - self._http_request = http_request - self._state = WorkerState.IDLE - self._job = None - - # Give ability to send requests to worker - http_request.request_id = str(uuid4()) - http_request.pending_requests = {} - - async def send_job(self): - if self._job is not None: - data = { - "job_id": self._job.id, - "worker_id": self.id, - "data": self._job.data - } - return await self.call("start_job", data) - return False - - async def call(self, method, params=None, timeout=None): - """Call method on worker's side.""" - request_id = self._http_request.request_id - self._http_request.request_id = str(uuid4()) - pending_requests = self._http_request.pending_requests - pending_requests[request_id] = asyncio.Future() - - request = 
encode_request(method, id=request_id, params=params) - - await self._http_request.ws.send_str(request) - - if timeout: - await asyncio.wait_for( - pending_requests[request_id], - timeout=timeout - ) - - else: - await pending_requests[request_id] - - result = pending_requests[request_id].result() - del pending_requests[request_id] - - return result - - async def close(self): - return await self.ws.close( - code=WSCloseCode.GOING_AWAY, - message="Server shutdown" - ) - - @property - def id(self): - if self._id is None: - self._id = str(uuid4()) - return self._id - - @property - def state(self): - return self._state - - @property - def current_job(self): - return self._job - - @property - def http_request(self): - return self._http_request - - @property - def ws(self): - return self.http_request.ws - - def connection_is_alive(self): - if self.ws.closed or self.ws._writer.transport.is_closing(): - return False - return True - - def is_idle(self): - return self._state is WorkerState.IDLE - - def job_assigned(self): - return ( - self._state is WorkerState.JOB_ASSIGNED - or self._state is WorkerState.JOB_SENT - ) - - def is_working(self): - return self._state is WorkerState.JOB_SENT - - def set_current_job(self, job): - if job is self._job: - return - - self._job = job - if job is None: - self._set_idle() - else: - self._state = WorkerState.JOB_ASSIGNED - job.set_worker(self) - - def _set_idle(self): - self._job = None - self._state = WorkerState.IDLE - - def set_working(self): - self._state = WorkerState.JOB_SENT diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers_rpc_route.py b/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers_rpc_route.py deleted file mode 100644 index e3c67fb3c3..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_server/workers_rpc_route.py +++ /dev/null @@ -1,124 +0,0 @@ -import asyncio - -import aiohttp -from aiohttp_json_rpc import JsonRpc -from aiohttp_json_rpc.protocol import ( - encode_error, decode_msg, JsonRpcMsgTyp -) -from aiohttp_json_rpc.exceptions import RpcError -from .workers import Worker - - -class WorkerRpc(JsonRpc): - def __init__(self, job_queue, manager, **kwargs): - super().__init__(**kwargs) - - self._job_queue = job_queue - self._manager = manager - - self._stopped = False - - # Register methods - self.add_methods( - ("", self.register_worker), - ("", self.job_done) - ) - asyncio.ensure_future(self._rpc_loop(), loop=self.loop) - - self._manager.add_route( - "*", "/ws", self.handle_request - ) - - # Panel routes for tools - async def register_worker(self, request, host_name): - worker = Worker(host_name, request.http_request) - self._job_queue.add_worker(worker) - return worker.id - - async def _rpc_loop(self): - while self.loop.is_running(): - if self._stopped: - break - - for worker in tuple(self._job_queue.workers()): - if not worker.connection_is_alive(): - self._job_queue.remove_worker(worker) - self._job_queue.assign_jobs() - - await self.send_jobs() - await asyncio.sleep(5) - - async def job_done(self, worker_id, job_id, success, message, data): - worker = self._job_queue.get_worker(worker_id) - if worker is not None: - worker.set_current_job(None) - - job = self._job_queue.get_job(job_id) - if job is not None: - job.set_done(success, message, data) - return True - - async def send_jobs(self): - invalid_workers = [] - for worker in self._job_queue.workers(): - if worker.job_assigned() and not worker.is_working(): - try: - await worker.send_job() - - except ConnectionResetError: - 
invalid_workers.append(worker) - - for worker in invalid_workers: - self._job_queue.remove_worker(worker) - - async def handle_websocket_request(self, http_request): - """Override this method to catch CLOSING messages.""" - http_request.msg_id = 0 - http_request.pending = {} - - # prepare and register websocket - ws = aiohttp.web_ws.WebSocketResponse() - await ws.prepare(http_request) - http_request.ws = ws - self.clients.append(http_request) - - while not ws.closed: - self.logger.debug('waiting for messages') - raw_msg = await ws.receive() - - if raw_msg.type == aiohttp.WSMsgType.TEXT: - self.logger.debug('raw msg received: %s', raw_msg.data) - self.loop.create_task( - self._handle_rpc_msg(http_request, raw_msg) - ) - - elif raw_msg.type == aiohttp.WSMsgType.CLOSING: - break - - self.clients.remove(http_request) - return ws - - async def _handle_rpc_msg(self, http_request, raw_msg): - # This is duplicated code from super but there is no way how to do it - # to be able handle server->client requests - try: - _raw_message = raw_msg.data - msg = decode_msg(_raw_message) - - except RpcError as error: - await self._ws_send_str(http_request, encode_error(error)) - return - - if msg.type in (JsonRpcMsgTyp.RESULT, JsonRpcMsgTyp.ERROR): - request_id = msg.data["id"] - if request_id in http_request.pending_requests: - future = http_request.pending_requests[request_id] - future.set_result(msg.data["result"]) - return - - return await super()._handle_rpc_msg(http_request, raw_msg) - - async def stop(self): - self._stopped = True - for worker in tuple(self._job_queue.workers()): - await worker.close() diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_workers/__init__.py b/server_addon/jobqueue/client/ayon_jobqueue/job_workers/__init__.py deleted file mode 100644 index f771797aea..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_workers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .base_worker import WorkerJobsConnection - -__all__ = ( - "WorkerJobsConnection", -) diff --git a/server_addon/jobqueue/client/ayon_jobqueue/job_workers/base_worker.py b/server_addon/jobqueue/client/ayon_jobqueue/job_workers/base_worker.py deleted file mode 100644 index 85506565f4..0000000000 --- a/server_addon/jobqueue/client/ayon_jobqueue/job_workers/base_worker.py +++ /dev/null @@ -1,190 +0,0 @@ -import sys -import datetime -import asyncio -import traceback - -from aiohttp_json_rpc import JsonRpcClient - - -class WorkerClient(JsonRpcClient): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.add_methods( - ("", self.start_job), - ) - self.current_job = None - self._id = None - - def set_id(self, worker_id): - self._id = worker_id - - async def start_job(self, job_data): - if self.current_job is not None: - return False - - print("Got new job {}".format(str(job_data))) - self.current_job = job_data - return True - - def finish_job(self, success, message, data): - asyncio.ensure_future( - self._finish_job(success, message, data), - loop=self._loop - ) - - async def _finish_job(self, success, message, data): - print("Current job", self.current_job) - job_id = self.current_job["job_id"] - self.current_job = None - - return await self.call( - "job_done", [self._id, job_id, success, message, data] - ) - - -class WorkerJobsConnection: - """WS connection to Job server. - - Helper class to create a connection to process jobs from job server. - - To be able receive jobs is needed to create a connection and then register - as worker for specific host. 
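# Editorial sketch (not part of the deleted addon code): one plausible way a
# host integration could drive WorkerJobsConnection - run main_loop() on a
# dedicated event loop in a background thread, poll current_job and report
# the outcome with finish_job(). The ws:// URL, the host name and the
# process_job() callable are illustrative assumptions; the real TVPaint
# worker entry point lives in ayon_tvpaint.worker.
import asyncio
import threading
import time


def run_worker(process_job, server_url="ws://localhost:8079/ws",
               host_name="tvpaint"):
    loop = asyncio.new_event_loop()
    connection = WorkerJobsConnection(server_url, host_name, loop)

    thread = threading.Thread(
        target=loop.run_until_complete,
        args=(connection.main_loop(),),
        daemon=True,
    )
    thread.start()

    try:
        while thread.is_alive():
            job = connection.current_job
            if job is not None:
                try:
                    result = process_job(job["data"])
                    connection.finish_job(True, None, result)
                except Exception as exc:
                    connection.finish_job(False, str(exc), None)
                # Give the scheduled "job_done" call a moment to clear
                # current_job before polling for the next job.
                while connection.current_job is job:
                    time.sleep(0.1)
            time.sleep(1.0)
    finally:
        connection.stop()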
- """ - retry_time_seconds = 5 - - def __init__(self, server_url, host_name, loop=None): - self.client = None - self._loop = loop - - self._host_name = host_name - self._server_url = server_url - - self._is_running = False - self._connecting = False - self._connected = False - self._stopped = False - - def stop(self): - print("Stopping worker") - self._stopped = True - - @property - def is_running(self): - return self._is_running - - @property - def current_job(self): - if self.client is not None: - return self.client.current_job - return None - - def finish_job(self, success=True, message=None, data=None): - """Worker finished job and sets the result which is send to server.""" - if self.client is None: - print(( - "Couldn't sent job status to server because" - " client is not connected." - )) - else: - self.client.finish_job(success, message, data) - - async def main_loop(self, register_worker=True): - """Main loop of connection which keep connection to server alive.""" - self._is_running = True - - while not self._stopped: - start_time = datetime.datetime.now() - await self._connection_loop(register_worker) - delta = datetime.datetime.now() - start_time - print("Connection loop took {}s".format(str(delta))) - # Check if was stopped and stop while loop in that case - if self._stopped: - break - - if delta.seconds < 60: - print(( - "Can't connect to server will try in {} seconds." - ).format(self.retry_time_seconds)) - - await asyncio.sleep(self.retry_time_seconds) - self._is_running = False - - async def _connect(self): - self.client = WorkerClient() - print("Connecting to {}".format(self._server_url)) - try: - await self.client.connect_url(self._server_url) - except KeyboardInterrupt: - raise - except Exception: - traceback.print_exception(*sys.exc_info()) - - async def _connection_loop(self, register_worker): - self._connecting = True - future = asyncio.run_coroutine_threadsafe( - self._connect(), loop=self._loop - ) - - while self._connecting: - if not future.done(): - await asyncio.sleep(0.07) - continue - - session = getattr(self.client, "_session", None) - ws = getattr(self.client, "_ws", None) - if session is not None: - if session.closed: - self._connecting = False - self._connected = False - break - - elif ws is not None: - self._connecting = False - self._connected = True - - if self._stopped: - break - - await asyncio.sleep(0.07) - - if not self._connected: - self.client = None - return - - print("Connected to job queue server") - if register_worker: - self.register_as_worker() - - while self._connected and self._loop.is_running(): - if self._stopped or ws.closed: - break - - await asyncio.sleep(0.3) - - await self._stop_cleanup() - - def register_as_worker(self): - """Register as worker ready to work on server side.""" - asyncio.ensure_future(self._register_as_worker(), loop=self._loop) - - async def _register_as_worker(self): - worker_id = await self.client.call( - "register_worker", [self._host_name] - ) - self.client.set_id(worker_id) - print( - "Registered as worker with id {}".format(worker_id) - ) - - async def disconnect(self): - await self._stop_cleanup() - - async def _stop_cleanup(self): - print("Cleanup after stop") - if self.client is not None and hasattr(self.client, "_ws"): - await self.client.disconnect() - - self.client = None - self._connecting = False - self._connected = False diff --git a/server_addon/jobqueue/client/ayon_jobqueue/version.py b/server_addon/jobqueue/client/ayon_jobqueue/version.py deleted file mode 100644 index b29847c090..0000000000 --- 
a/server_addon/jobqueue/client/ayon_jobqueue/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'jobqueue' version.""" -__version__ = "1.1.0" diff --git a/server_addon/jobqueue/client/pyproject.toml b/server_addon/jobqueue/client/pyproject.toml deleted file mode 100644 index f15d4baa92..0000000000 --- a/server_addon/jobqueue/client/pyproject.toml +++ /dev/null @@ -1,6 +0,0 @@ -[project] -name="jobqueue" -description="AYON JobQueue addon." - -[ayon.runtimeDependencies] -aiohttp_json_rpc = "*" diff --git a/server_addon/jobqueue/package.py b/server_addon/jobqueue/package.py deleted file mode 100644 index 75a7572c4e..0000000000 --- a/server_addon/jobqueue/package.py +++ /dev/null @@ -1,11 +0,0 @@ -name = "jobqueue" -title = "JobQueue" -version = "1.1.0" -client_dir = "ayon_jobqueue" - -ayon_required_addons = { - "core": ">0.3.2", -} -ayon_compatible_addons = { - "tvpaint": ">=0.2.0", -} diff --git a/server_addon/jobqueue/server/__init__.py b/server_addon/jobqueue/server/__init__.py deleted file mode 100644 index 9ec74fce53..0000000000 --- a/server_addon/jobqueue/server/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from ayon_server.addons import BaseServerAddon - - -class JobQueueAddon(BaseServerAddon): - pass diff --git a/server_addon/nuke/client/ayon_nuke/__init__.py b/server_addon/nuke/client/ayon_nuke/__init__.py deleted file mode 100644 index 29ea039739..0000000000 --- a/server_addon/nuke/client/ayon_nuke/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from .version import __version__ -from .addon import ( - NUKE_ROOT_DIR, - NukeAddon, -) - - -__all__ = ( - "__version__", - - "NUKE_ROOT_DIR", - "NukeAddon", -) diff --git a/server_addon/nuke/client/ayon_nuke/addon.py b/server_addon/nuke/client/ayon_nuke/addon.py deleted file mode 100644 index ccb7379c0f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/addon.py +++ /dev/null @@ -1,74 +0,0 @@ -import os -import platform -from ayon_core.addon import AYONAddon, IHostAddon - -from .version import __version__ - -NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) - - -class NukeAddon(AYONAddon, IHostAddon): - name = "nuke" - version = __version__ - host_name = "nuke" - - def add_implementation_envs(self, env, _app): - # Add requirements to NUKE_PATH - new_nuke_paths = [ - os.path.join(NUKE_ROOT_DIR, "startup") - ] - old_nuke_path = env.get("NUKE_PATH") or "" - for path in old_nuke_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_nuke_paths: - new_nuke_paths.append(norm_path) - - env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) - # Remove auto screen scale factor for Qt - # - let Nuke decide it's value - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - # Remove tkinter library paths if are set - env.pop("TK_LIBRARY", None) - env.pop("TCL_LIBRARY", None) - - # Add vendor to PYTHONPATH - python_path = env["PYTHONPATH"] - python_path_parts = [] - if python_path: - python_path_parts = python_path.split(os.pathsep) - vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor") - python_path_parts.insert(0, vendor_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = 
env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - def get_launch_hook_paths(self, app): - if app.host_name != self.host_name: - return [] - return [ - os.path.join(NUKE_ROOT_DIR, "hooks") - ] - - def get_workfile_extensions(self): - return [".nk"] diff --git a/server_addon/nuke/client/ayon_nuke/api/__init__.py b/server_addon/nuke/client/ayon_nuke/api/__init__.py deleted file mode 100644 index caefba766f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -from .workio import ( - file_extensions, - has_unsaved_changes, - save_file, - open_file, - current_file, - work_root, -) -from .command import ( - viewer_update_and_undo_stop -) -from .plugin import ( - NukeCreator, - NukeWriteCreator, - NukeCreatorError, - get_instance_group_node_childs, - get_colorspace_from_node -) -from .pipeline import ( - NukeHost, - - ls, - - list_instances, - remove_instance, - select_instance, - - containerise, - parse_container, - update_container, - -) -from .lib import ( - INSTANCE_DATA_KNOB, - ROOT_DATA_KNOB, - maintained_selection, - reset_selection, - select_nodes, - get_view_process_node, - duplicate_node, - convert_knob_value_to_correct_type, - get_node_data, - set_node_data, - update_node_data, - create_write_node, - link_knobs -) -from .utils import ( - colorspace_exists_on_node, - get_colorspace_list -) - -from .actions import ( - SelectInvalidAction, - SelectInstanceNodeAction -) - -__all__ = ( - "file_extensions", - "has_unsaved_changes", - "save_file", - "open_file", - "current_file", - "work_root", - - "viewer_update_and_undo_stop", - - "NukeCreator", - "NukeWriteCreator", - "NukeCreatorError", - "NukeHost", - "get_instance_group_node_childs", - "get_colorspace_from_node", - - "ls", - - "list_instances", - "remove_instance", - "select_instance", - - "containerise", - "parse_container", - "update_container", - - "INSTANCE_DATA_KNOB", - "ROOT_DATA_KNOB", - "maintained_selection", - "reset_selection", - "select_nodes", - "get_view_process_node", - "duplicate_node", - "convert_knob_value_to_correct_type", - "get_node_data", - "set_node_data", - "update_node_data", - "create_write_node", - "link_knobs", - - "colorspace_exists_on_node", - "get_colorspace_list", - - "SelectInvalidAction", - "SelectInstanceNodeAction" -) diff --git a/server_addon/nuke/client/ayon_nuke/api/actions.py b/server_addon/nuke/client/ayon_nuke/api/actions.py deleted file mode 100644 index a7bcb5b44f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/actions.py +++ /dev/null @@ -1,77 +0,0 @@ -import pyblish.api - -from ayon_core.pipeline.publish import get_errored_instances_from_context -from .lib import ( - reset_selection, - select_nodes -) - - -class SelectInvalidAction(pyblish.api.Action): - """Select invalid nodes in Nuke when plug-in failed. - - To retrieve the invalid nodes this assumes a static `get_invalid()` - method is available on the plugin. 
- - """ - label = "Select invalid nodes" - on = "failed" # This action is only available on a failed plug-in - icon = "search" # Icon from Awesome Icon - - def process(self, context, plugin): - - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - - # Get the invalid nodes for the plug-ins - self.log.info("Finding invalid nodes..") - invalid = set() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.update(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") - - if invalid: - self.log.info("Selecting invalid nodes: {}".format(invalid)) - reset_selection() - select_nodes(invalid) - else: - self.log.info("No invalid nodes found.") - - -class SelectInstanceNodeAction(pyblish.api.Action): - """Select instance node for failed plugin.""" - label = "Select instance node" - on = "failed" # This action is only available on a failed plug-in - icon = "mdi.cursor-default-click" - - def process(self, context, plugin): - - # Get the errored instances for the plug-in - errored_instances = get_errored_instances_from_context( - context, plugin) - - # Get the invalid nodes for the plug-ins - self.log.info("Finding instance nodes..") - nodes = set() - for instance in errored_instances: - instance_node = instance.data.get("transientData", {}).get("node") - if not instance_node: - raise RuntimeError( - "No transientData['node'] found on instance: {}".format( - instance - ) - ) - nodes.add(instance_node) - - if nodes: - self.log.info("Selecting instance nodes: {}".format(nodes)) - reset_selection() - select_nodes(nodes) - else: - self.log.info("No instance nodes found.") diff --git a/server_addon/nuke/client/ayon_nuke/api/command.py b/server_addon/nuke/client/ayon_nuke/api/command.py deleted file mode 100644 index 2f772469d8..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/command.py +++ /dev/null @@ -1,21 +0,0 @@ -import logging -import contextlib -import nuke - -log = logging.getLogger(__name__) - - -@contextlib.contextmanager -def viewer_update_and_undo_stop(): - """Lock viewer from updating and stop recording undo steps""" - try: - # stop active viewer to update any change - viewer = nuke.activeViewer() - if viewer: - viewer.stop() - else: - log.warning("No available active Viewer") - nuke.Undo.disable() - yield - finally: - nuke.Undo.enable() diff --git a/server_addon/nuke/client/ayon_nuke/api/constants.py b/server_addon/nuke/client/ayon_nuke/api/constants.py deleted file mode 100644 index 110199720f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/constants.py +++ /dev/null @@ -1,4 +0,0 @@ -import os - - -ASSIST = bool(os.getenv("NUKEASSIST")) diff --git a/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py b/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py deleted file mode 100644 index 435e4a5806..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -import re -import nuke - -from ayon_core.lib import Logger - -log = Logger.get_logger(__name__) - - -class GizmoMenu(): - def __init__(self, title, icon=None): - - self.toolbar = self._create_toolbar_menu( - title, - icon=icon - ) - - self._script_actions = [] - - def _create_toolbar_menu(self, name, icon=None): - nuke_node_menu = nuke.menu("Nodes") - return nuke_node_menu.addMenu( - name, - icon=icon - ) - - def _make_menu_path(self, path, icon=None): - parent = self.toolbar - for 
folder in re.split(r"/|\\", path): - if not folder: - continue - existing_menu = parent.findItem(folder) - if existing_menu: - parent = existing_menu - else: - parent = parent.addMenu(folder, icon=icon) - - return parent - - def build_from_configuration(self, configuration): - for menu in configuration: - # Construct parent path else parent is toolbar - parent = self.toolbar - gizmo_toolbar_path = menu.get("gizmo_toolbar_path") - if gizmo_toolbar_path: - parent = self._make_menu_path(gizmo_toolbar_path) - - for item in menu["sub_gizmo_list"]: - assert isinstance(item, dict), "Configuration is wrong!" - - if not item.get("title"): - continue - - item_type = item.get("sourcetype") - - if item_type == "python": - parent.addCommand( - item["title"], - command=str(item["command"]), - icon=item.get("icon"), - shortcut=item.get("shortcut") - ) - elif item_type == "file": - parent.addCommand( - item['title'], - "nuke.createNode('{}')".format(item.get('file_name')), - shortcut=item.get('shortcut') - ) - - # add separator - # Special behavior for separators - elif item_type == "separator": - parent.addSeparator() - - # add submenu - # items should hold a collection of submenu items (dict) - elif item_type == "menu": - # assert "items" in item, "Menu is missing 'items' key" - parent.addMenu( - item['title'], - icon=item.get('icon') - ) - - def add_gizmo_path(self, gizmo_paths): - for gizmo_path in gizmo_paths: - if os.path.isdir(gizmo_path): - for folder in os.listdir(gizmo_path): - if os.path.isdir(os.path.join(gizmo_path, folder)): - nuke.pluginAddPath(os.path.join(gizmo_path, folder)) - nuke.pluginAddPath(gizmo_path) - else: - log.warning("This path doesn't exist: {}".format(gizmo_path)) diff --git a/server_addon/nuke/client/ayon_nuke/api/lib.py b/server_addon/nuke/client/ayon_nuke/api/lib.py deleted file mode 100644 index 6caaed3801..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/lib.py +++ /dev/null @@ -1,2967 +0,0 @@ -import os -import re -import json -import six -import functools -import warnings -import platform -import tempfile -import contextlib -from collections import OrderedDict - -import nuke -from qtpy import QtCore, QtWidgets -import ayon_api - -from ayon_core.host import HostDirmap -from ayon_core.tools.utils import host_tools -from ayon_core.pipeline.workfile.workfile_template_builder import ( - TemplateProfileNotFound -) -from ayon_core.lib import ( - env_value_to_bool, - Logger, - get_version_from_path, - StringTemplate, -) - -from ayon_core.settings import ( - get_project_settings, - get_current_project_settings, -) -from ayon_core.addon import AddonsManager -from ayon_core.pipeline.template_data import get_template_data_with_names -from ayon_core.pipeline import ( - Anatomy, - get_current_host_name, - get_current_project_name, - get_current_folder_path, - get_current_task_name, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) -from ayon_core.pipeline.context_tools import ( - get_current_context_custom_workfile_template -) -from ayon_core.pipeline.colorspace import ( - get_current_context_imageio_config_preset -) -from ayon_core.pipeline.workfile import BuildWorkfile -from . 
import gizmo_menu -from .constants import ASSIST - -from .workio import save_file -from .utils import get_node_outputs - -log = Logger.get_logger(__name__) - -MENU_LABEL = os.getenv("AYON_MENU_LABEL") or "AYON" -NODE_TAB_NAME = MENU_LABEL -DATA_GROUP_KEY = "{}DataGroup".format(MENU_LABEL.capitalize()) -EXCLUDED_KNOB_TYPE_ON_READ = ( - 20, # Tab Knob - 26, # Text Knob (But for backward compatibility, still be read - # if value is not an empty string.) -) -JSON_PREFIX = "JSON:::" -ROOT_DATA_KNOB = "publish_context" -INSTANCE_DATA_KNOB = "publish_instance" - - -class DeprecatedWarning(DeprecationWarning): - pass - - -def deprecated(new_destination): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - func = None - if callable(new_destination): - func = new_destination - new_destination = None - - def _decorator(decorated_func): - if new_destination is None: - warning_message = ( - " Please check content of deprecated function to figure out" - " possible replacement." - ) - else: - warning_message = " Please replace your usage with '{}'.".format( - new_destination - ) - - @functools.wraps(decorated_func) - def wrapper(*args, **kwargs): - warnings.simplefilter("always", DeprecatedWarning) - warnings.warn( - ( - "Call to deprecated function '{}'" - "\nFunction was moved or removed.{}" - ).format(decorated_func.__name__, warning_message), - category=DeprecatedWarning, - stacklevel=4 - ) - return decorated_func(*args, **kwargs) - return wrapper - - if func is None: - return _decorator - return _decorator(func) - - -class Context: - main_window = None - context_action_item = None - project_name = os.getenv("AYON_PROJECT_NAME") - # Workfile related code - workfiles_launched = False - workfiles_tool_timer = None - - # Seems unused - _project_entity = None - - -def get_main_window(): - """Acquire Nuke's main window""" - if Context.main_window is None: - - top_widgets = QtWidgets.QApplication.topLevelWidgets() - name = "Foundry::UI::DockMainWindow" - for widget in top_widgets: - if ( - widget.inherits("QMainWindow") - and widget.metaObject().className() == name - ): - Context.main_window = widget - break - return Context.main_window - - -def set_node_data(node, knobname, data): - """Write data to node invisible knob - - Will create new in case it doesn't exists - or update the one already created. - - Args: - node (nuke.Node): node object - knobname (str): knob name - data (dict): data to be stored in knob - """ - # if exists then update data - if knobname in node.knobs(): - update_node_data(node, knobname, data) - return - - # else create new - knob_value = JSON_PREFIX + json.dumps(data) - knob = nuke.String_Knob(knobname) - knob.setValue(knob_value) - knob.setFlag(nuke.INVISIBLE) - node.addKnob(knob) - - -def get_node_data(node, knobname): - """Read data from node. - - Args: - node (nuke.Node): node object - knobname (str): knob name - - Returns: - dict: data stored in knob - """ - if knobname not in node.knobs(): - return - - rawdata = node[knobname].getValue() - if ( - isinstance(rawdata, six.string_types) - and rawdata.startswith(JSON_PREFIX) - ): - try: - return json.loads(rawdata[len(JSON_PREFIX):]) - except json.JSONDecodeError: - return - - -def update_node_data(node, knobname, data): - """Update already present data. 
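# Editorial sketch (not part of the deleted addon code): a typical round trip
# through set_node_data()/get_node_data()/update_node_data() defined above -
# the payload is stored as a "JSON:::"-prefixed string in an invisible
# String_Knob on the node. The knob name and payload here are illustrative;
# the addon itself uses ROOT_DATA_KNOB and INSTANCE_DATA_KNOB for its data.
import nuke

from ayon_nuke.api import get_node_data, set_node_data, update_node_data

node = nuke.createNode("NoOp", inpanel=False)

set_node_data(node, "demo_data", {"productType": "render", "active": True})
update_node_data(node, "demo_data", {"active": False})

print(get_node_data(node, "demo_data"))
# -> {'productType': 'render', 'active': False}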
- - Args: - node (nuke.Node): node object - knobname (str): knob name - data (dict): data to update knob value - """ - knob = node[knobname] - node_data = get_node_data(node, knobname) or {} - node_data.update(data) - knob_value = JSON_PREFIX + json.dumps(node_data) - knob.setValue(knob_value) - - -class Knobby(object): - """[DEPRECATED] For creating knob which it's type isn't - mapped in `create_knobs` - - Args: - type (string): Nuke knob type name - value: Value to be set with `Knob.setValue`, put `None` if not required - flags (list, optional): Knob flags to be set with `Knob.setFlag` - *args: Args other than knob name for initializing knob class - - """ - - def __init__(self, type, value, flags=None, *args): - self.type = type - self.value = value - self.flags = flags or [] - self.args = args - - def create(self, name, nice=None): - knob_cls = getattr(nuke, self.type) - knob = knob_cls(name, nice, *self.args) - if self.value is not None: - knob.setValue(self.value) - for flag in self.flags: - knob.setFlag(flag) - return knob - - @staticmethod - def nice_naming(key): - """Convert camelCase name into UI Display Name""" - words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) - return " ".join(words) - - -def create_knobs(data, tab=None): - """Create knobs by data - - Depending on the type of each dict value and creates the correct Knob. - - Mapped types: - bool: nuke.Boolean_Knob - int: nuke.Int_Knob - float: nuke.Double_Knob - list: nuke.Enumeration_Knob - six.string_types: nuke.String_Knob - - dict: If it's a nested dict (all values are dict), will turn into - A tabs group. Or just a knobs group. - - Args: - data (dict): collection of attributes and their value - tab (string, optional): Knobs' tab name - - Returns: - list: A list of `nuke.Knob` objects - - """ - def nice_naming(key): - """Convert camelCase name into UI Display Name""" - words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) - return " ".join(words) - - # Turn key-value pairs into knobs - knobs = list() - - if tab: - knobs.append(nuke.Tab_Knob(tab)) - - for key, value in data.items(): - # Knob name - if isinstance(key, tuple): - name, nice = key - else: - name, nice = key, nice_naming(key) - - # Create knob by value type - if isinstance(value, Knobby): - knobby = value - knob = knobby.create(name, nice) - - elif isinstance(value, float): - knob = nuke.Double_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, bool): - knob = nuke.Boolean_Knob(name, nice) - knob.setValue(value) - knob.setFlag(nuke.STARTLINE) - - elif isinstance(value, int): - knob = nuke.Int_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, six.string_types): - knob = nuke.String_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, list): - knob = nuke.Enumeration_Knob(name, nice, value) - - elif isinstance(value, dict): - if all(isinstance(v, dict) for v in value.values()): - # Create a group of tabs - begain = nuke.BeginTabGroup_Knob() - end = nuke.EndTabGroup_Knob() - begain.setName(name) - end.setName(name + "_End") - knobs.append(begain) - for k, v in value.items(): - knobs += create_knobs(v, tab=k) - knobs.append(end) - else: - # Create a group of knobs - knobs.append(nuke.Tab_Knob( - name, nice, nuke.TABBEGINCLOSEDGROUP)) - knobs += create_knobs(value) - knobs.append( - nuke.Tab_Knob(name + "_End", nice, nuke.TABENDGROUP)) - continue - - else: - raise TypeError("Unsupported type: %r" % type(value)) - - knobs.append(knob) - - return knobs - - -def imprint(node, data, tab=None): - """Store 
attributes with value on node - - Parse user data into Node knobs. - Use `collections.OrderedDict` to ensure knob order. - - Args: - node(nuke.Node): node object from Nuke - data(dict): collection of attributes and their value - - Returns: - None - - Examples: - ``` - import nuke - from ayon_nuke.api import lib - - node = nuke.createNode("NoOp") - data = { - # Regular type of attributes - "myList": ["x", "y", "z"], - "myBool": True, - "myFloat": 0.1, - "myInt": 5, - - # Creating non-default imprint type of knob - "MyFilePath": lib.Knobby("File_Knob", "/file/path"), - "divider": lib.Knobby("Text_Knob", ""), - - # Manual nice knob naming - ("my_knob", "Nice Knob Name"): "some text", - - # dict type will be created as knob group - "KnobGroup": { - "knob1": 5, - "knob2": "hello", - "knob3": ["a", "b"], - }, - - # Nested dict will be created as tab group - "TabGroup": { - "tab1": {"count": 5}, - "tab2": {"isGood": True}, - "tab3": {"direction": ["Left", "Right"]}, - }, - } - lib.imprint(node, data, tab="Demo") - - ``` - - """ - for knob in create_knobs(data, tab): - # If knob name exists we set the value. Technically there could be - # multiple knobs with the same name, but the intent is not to have - # duplicated knobs so we do not account for that. - if knob.name() in node.knobs().keys(): - node[knob.name()].setValue(knob.value()) - else: - node.addKnob(knob) - - -@deprecated -def add_publish_knob(node): - """[DEPRECATED] Add Publish knob to node - - Arguments: - node (nuke.Node): nuke node to be processed - - Returns: - node (nuke.Node): processed nuke node - - """ - if "publish" not in node.knobs(): - body = OrderedDict() - body[("divd", "Publishing")] = Knobby("Text_Knob", '') - body["publish"] = True - imprint(node, body) - return node - - -@deprecated("ayon_nuke.api.lib.set_node_data") -def set_avalon_knob_data(node, data=None, prefix="avalon:"): - """[DEPRECATED] Sets data into nodes's avalon knob - - This function is still used but soon will be deprecated. - Use `set_node_data` instead. - - Arguments: - node (nuke.Node): Nuke node to imprint with data, - data (dict, optional): Data to be imprinted into AvalonTab - prefix (str, optional): filtering prefix - - Returns: - node (nuke.Node) - - Examples: - data = { - 'folderPath': 'sq020sh0280', - 'productType': 'render', - 'productName': 'productMain' - } - """ - data = data or dict() - create = OrderedDict() - - tab_name = NODE_TAB_NAME - editable = ["folderPath", "productName", "name", "namespace"] - - existed_knobs = node.knobs() - - for key, value in data.items(): - knob_name = prefix + key - gui_name = key - - if knob_name in existed_knobs: - # Set value - try: - node[knob_name].setValue(value) - except TypeError: - node[knob_name].setValue(str(value)) - else: - # New knob - name = (knob_name, gui_name) # Hide prefix on GUI - if key in editable: - create[name] = value - else: - create[name] = Knobby("String_Knob", - str(value), - flags=[nuke.READ_ONLY]) - if tab_name in existed_knobs: - tab_name = None - else: - tab = OrderedDict() - warn = Knobby("Text_Knob", "Warning! 
Do not change following data!") - divd = Knobby("Text_Knob", "") - head = [ - (("warn", ""), warn), - (("divd", ""), divd), - ] - tab[DATA_GROUP_KEY] = OrderedDict(head + list(create.items())) - create = tab - - imprint(node, create, tab=tab_name) - return node - - -@deprecated("ayon_nuke.api.lib.get_node_data") -def get_avalon_knob_data(node, prefix="avalon:", create=True): - """[DEPRECATED] Gets a data from nodes's avalon knob - - This function is still used but soon will be deprecated. - Use `get_node_data` instead. - - Arguments: - node (obj): Nuke node to search for data, - prefix (str, optional): filtering prefix - - Returns: - data (dict) - """ - - data = {} - if NODE_TAB_NAME not in node.knobs(): - return data - - # check if lists - if not isinstance(prefix, list): - prefix = [prefix] - - # loop prefix - for p in prefix: - # check if the node is avalon tracked - try: - # check if data available on the node - _ = node[DATA_GROUP_KEY].value() - except NameError: - # if it doesn't then create it - if create: - node = set_avalon_knob_data(node) - return get_avalon_knob_data(node) - return {} - - # get data from filtered knobs - data.update({k.replace(p, ''): node[k].value() - for k in node.knobs().keys() - if p in k}) - - return data - - -def add_write_node(name, file_path, knobs, **kwarg): - """Adding nuke write node - - Arguments: - name (str): nuke node name - kwarg (attrs): data for nuke knobs - - Returns: - node (obj): nuke write node - """ - use_range_limit = kwarg.get("use_range_limit", None) - - w = nuke.createNode( - "Write", - "name {}".format(name), - inpanel=False - ) - - w["file"].setValue(file_path) - - # finally add knob overrides - set_node_knobs_from_settings(w, knobs, **kwarg) - - if use_range_limit: - w["use_limit"].setValue(True) - w["first"].setValue(kwarg["frame_range"][0]) - w["last"].setValue(kwarg["frame_range"][1]) - - return w - - -def read_avalon_data(node): - """Return user-defined knobs from given `node` - - Args: - node (nuke.Node): Nuke node object - - Returns: - Dict[str, nuke.Knob]: A dictionary of knob name to nuke.Knob objects - - """ - def compat_prefixed(knob_name): - if knob_name.startswith("avalon:"): - return knob_name[len("avalon:"):] - elif knob_name.startswith("ak:"): - return knob_name[len("ak:"):] - - data = dict() - - pattern = ("(?<=addUserKnob {)" - "([0-9]*) (\\S*)" # Matching knob type and knob name - "(?=[ |}])") - tcl_script = node.writeKnobs(nuke.WRITE_USER_KNOB_DEFS) - result = re.search(pattern, tcl_script) - - if result: - first_user_knob = result.group(2) - # Collect user knobs from the end of the knob list - for knob in reversed(node.allKnobs()): - knob_name = knob.name() - if not knob_name: - # Ignore unnamed knob - continue - - knob_type = nuke.knob(knob.fullyQualifiedName(), type=True) - value = knob.value() - - if ( - knob_type not in EXCLUDED_KNOB_TYPE_ON_READ or - # For compating read-only string data that imprinted - # by `nuke.Text_Knob`. - (knob_type == 26 and value) - ): - key = compat_prefixed(knob_name) - if key is not None: - data[key] = value - - if knob_name == first_user_knob: - break - - return data - - -def get_node_path(path, padding=4): - """Get filename for the Nuke write with padded number as '#' - - Arguments: - path (str): The path to render to. 
- - Returns: - Tuple[str, int, str]: head, padding, tail (extension) - - Examples: - >>> get_frame_path("test.exr") - ('test', 4, '.exr') - - >>> get_frame_path("filename.#####.tif") - ('filename.', 5, '.tif') - - >>> get_frame_path("foobar##.tif") - ('foobar', 2, '.tif') - - >>> get_frame_path("foobar_%08d.tif") - ('foobar_', 8, '.tif') - """ - filename, ext = os.path.splitext(path) - - # Find a final number group - if '%' in filename: - match = re.match('.*?(%[0-9]+d)$', filename) - if match: - padding = int(match.group(1).replace('%', '').replace('d', '')) - # remove number from end since fusion - # will swap it with the frame number - filename = filename.replace(match.group(1), '') - elif '#' in filename: - match = re.match('.*?(#+)$', filename) - - if match: - padding = len(match.group(1)) - # remove number from end since fusion - # will swap it with the frame number - filename = filename.replace(match.group(1), '') - - return filename, padding, ext - - -def get_nuke_imageio_settings(): - return get_project_settings(Context.project_name)["nuke"]["imageio"] - - -def get_imageio_node_setting(node_class, plugin_name, product_name): - """Get preset data for dataflow (fileType, compression, bitDepth)""" - imageio_nodes = get_nuke_imageio_settings()["nodes"] - required_nodes = imageio_nodes["required_nodes"] - - imageio_node = None - for node in required_nodes: - log.info(node) - if ( - node_class in node["nuke_node_class"] - and plugin_name in node["plugins"] - ): - imageio_node = node - break - - if not imageio_node: - return - - # find overrides and update knobs with them - get_imageio_node_override_setting( - node_class, - plugin_name, - product_name, - imageio_node["knobs"] - ) - return imageio_node - - -def get_imageio_node_override_setting( - node_class, plugin_name, product_name, knobs_settings -): - """ Get imageio node overrides from settings - """ - imageio_nodes = get_nuke_imageio_settings()["nodes"] - override_nodes = imageio_nodes["override_nodes"] - - # find matching override node - override_imageio_node = None - for onode in override_nodes: - if node_class not in onode["nuke_node_class"]: - continue - - if plugin_name not in onode["plugins"]: - continue - - # TODO change 'subsets' to 'product_names' in settings - if ( - onode["subsets"] - and not any( - re.search(s.lower(), product_name.lower()) - for s in onode["subsets"] - ) - ): - continue - - override_imageio_node = onode - break - - # add overrides to imageio_node - if override_imageio_node: - # get all knob names in imageio_node - knob_names = [k["name"] for k in knobs_settings] - - for oknob in override_imageio_node["knobs"]: - oknob_name = oknob["name"] - oknob_type = oknob["type"] - oknob_value = oknob[oknob_type] - for knob in knobs_settings: - # add missing knobs into imageio_node - if oknob_name not in knob_names: - knobs_settings.append(oknob) - knob_names.append(oknob_name) - continue - - if oknob_name != knob["name"]: - continue - - knob_type = knob["type"] - # override matching knob name - if not oknob_value: - # remove original knob if no value found in oknob - knobs_settings.remove(knob) - else: - # override knob value with oknob's - knob[knob_type] = oknob_value - - return knobs_settings - - -def get_imageio_input_colorspace(filename): - """Get input file colorspace based on regex in settings.""" - imageio_regex_inputs = ( - get_nuke_imageio_settings()["regex_inputs"]["inputs"]) - - preset_clrsp = None - for regexInput in imageio_regex_inputs: - if bool(re.search(regexInput["regex"], filename)): - 
preset_clrsp = str(regexInput["colorspace"]) - - return preset_clrsp - - -def get_view_process_node(): - reset_selection() - - ipn_node = None - for v_ in nuke.allNodes(filter="Viewer"): - ipn = v_['input_process_node'].getValue() - ipn_node = nuke.toNode(ipn) - - # skip if no input node is set - if not ipn: - continue - - if ipn == "VIEWER_INPUT" and not ipn_node: - # since it is set by default we can ignore it - # nobody usually use this but use it if - # it exists in nodes - continue - - if not ipn_node: - # in case a Viewer node is transferred from - # different workfile with old values - raise NameError(( - "Input process node name '{}' set in " - "Viewer '{}' is doesn't exists in nodes" - ).format(ipn, v_.name())) - - ipn_node.setSelected(True) - - if ipn_node: - return duplicate_node(ipn_node) - - -def on_script_load(): - """Callback for ffmpeg support""" - if nuke.env["LINUX"]: - nuke.tcl('load ffmpegReader') - nuke.tcl('load ffmpegWriter') - else: - nuke.tcl('load movReader') - nuke.tcl('load movWriter') - - -def check_inventory_versions(): - """ - Actual version identifier of Loaded containers - - Any time this function is run it will check all nodes and filter only - Loader nodes for its version. It will get all versions from database - and check if the node is having actual version. If not then it will color - it to red. - """ - from .pipeline import parse_container - - # get all Loader nodes by avalon attribute metadata - node_with_repre_id = [] - repre_ids = set() - # Find all containers and collect its node and representation ids - for node in nuke.allNodes(): - container = parse_container(node) - - if container: - node = nuke.toNode(container["objectName"]) - avalon_knob_data = read_avalon_data(node) - repre_id = avalon_knob_data["representation"] - - repre_ids.add(repre_id) - node_with_repre_id.append((node, repre_id)) - - # Skip if nothing was found - if not repre_ids: - return - - project_name = get_current_project_name() - # Find representations based on found containers - repre_entities = ayon_api.get_representations( - project_name, - representation_ids=repre_ids, - fields={"id", "versionId"} - ) - # Store representations by id and collect version ids - repre_entities_by_id = {} - version_ids = set() - for repre_entity in repre_entities: - # Use stringed representation id to match value in containers - repre_id = repre_entity["id"] - repre_entities_by_id[repre_id] = repre_entity - version_ids.add(repre_entity["versionId"]) - - version_entities = ayon_api.get_versions( - project_name, - version_ids=version_ids, - fields={"id", "version", "productId"}, - ) - # Store versions by id and collect product ids - version_entities_by_id = {} - product_ids = set() - for version_entity in version_entities: - version_entities_by_id[version_entity["id"]] = version_entity - product_ids.add(version_entity["productId"]) - - # Query last versions based on product ids - last_versions_by_product_id = ayon_api.get_last_versions( - project_name, product_ids=product_ids, fields={"id", "productId"} - ) - - # Loop through collected container nodes and their representation ids - for item in node_with_repre_id: - # Some python versions of nuke can't unfold tuple in for loop - node, repre_id = item - repre_entity = repre_entities_by_id.get(repre_id) - # Failsafe for not finding the representation. 
- if not repre_entity: - log.warning(( - "Could not find the representation on node \"{}\"" - ).format(node.name())) - continue - - version_id = repre_entity["versionId"] - version_entity = version_entities_by_id.get(version_id) - if not version_entity: - log.warning(( - "Could not find the version on node \"{}\"" - ).format(node.name())) - continue - - # Get last version based on product id - product_id = version_entity["productId"] - last_version = last_versions_by_product_id[product_id] - # Check if last version is same as current version - if last_version["id"] == version_entity["id"]: - color_value = "0x4ecd25ff" - else: - color_value = "0xd84f20ff" - node["tile_color"].setValue(int(color_value, 16)) - - -def writes_version_sync(): - """Callback synchronizing version of publishable write nodes""" - try: - rootVersion = get_version_from_path(nuke.root().name()) - padding = len(rootVersion) - new_version = "v" + str("{" + ":0>{}".format(padding) + "}").format( - int(rootVersion) - ) - except Exception: - return - - for each in nuke.allNodes(filter="Write"): - # check if the node is avalon tracked - if NODE_TAB_NAME not in each.knobs(): - continue - - avalon_knob_data = read_avalon_data(each) - - try: - if avalon_knob_data["families"] not in ["render"]: - continue - - node_file = each["file"].value() - - node_version = "v" + get_version_from_path(node_file) - - node_new_file = node_file.replace(node_version, new_version) - each["file"].setValue(node_new_file) - if not os.path.isdir(os.path.dirname(node_new_file)): - log.warning("Path does not exist! I am creating it.") - os.makedirs(os.path.dirname(node_new_file)) - except Exception as e: - log.warning( - "Write node: `{}` has no version in path: {}".format( - each.name(), e)) - - -def version_up_script(): - """Raising working script's version""" - import nukescripts - nukescripts.script_and_write_nodes_version_up() - - -def check_product_name_exists(nodes, product_name): - """ - Checking if node is not already created to secure there is no duplicity - - Arguments: - nodes (list): list of nuke.Node objects - product_name (str): name we try to find - - Returns: - bool: True of False - """ - return next((True for n in nodes - if product_name in read_avalon_data(n).get("productName", "")), - False) - - -def format_anatomy(data): - """Helping function for formatting of anatomy paths - - Arguments: - data (dict): dictionary with attributes used for formatting - - Return: - str: Formatted path. 
- """ - - project_name = get_current_project_name() - anatomy = Anatomy(project_name) - - frame_padding = anatomy.templates_obj.frame_padding - - version = data.get("version") - if version is None: - file = script_name() - data["version"] = get_version_from_path(file) - - folder_path = data["folderPath"] - task_name = data["task"] - host_name = get_current_host_name() - - context_data = get_template_data_with_names( - project_name, folder_path, task_name, host_name - ) - data.update(context_data) - data.update({ - "subset": data["productName"], - "family": data["productType"], - "product": { - "name": data["productName"], - "type": data["productType"], - }, - "frame": "#" * frame_padding, - }) - return anatomy.format(data) - - -def script_name() -> str: - """Returns nuke script path""" - return nuke.root().knob("name").value() - - -def add_button_render_on_farm(node): - name = "renderOnFarm" - label = "Render On Farm" - value = ( - "from ayon_nuke.api.utils import submit_render_on_farm;" - "submit_render_on_farm(nuke.thisNode())" - ) - knob = nuke.PyScript_Knob(name, label, value) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - - -def add_button_write_to_read(node): - name = "createReadNode" - label = "Read From Rendered" - value = "import write_to_read;\ - write_to_read.write_to_read(nuke.thisNode(), allow_relative=False)" - knob = nuke.PyScript_Knob(name, label, value) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - - -def add_button_clear_rendered(node, path): - name = "clearRendered" - label = "Clear Rendered" - value = "import clear_rendered;\ - clear_rendered.clear_rendered(\"{}\")".format(path) - knob = nuke.PyScript_Knob(name, label, value) - node.addKnob(knob) - - -def create_prenodes( - prev_node, - nodes_setting, - plugin_name=None, - product_name=None, - **kwargs -): - last_node = None - for_dependency = {} - for node in nodes_setting: - # get attributes - name = node["name"] - nodeclass = node["nodeclass"] - knobs = node["knobs"] - - # create node - now_node = nuke.createNode( - nodeclass, - "name {}".format(name), - inpanel=False - ) - - # add for dependency linking - for_dependency[name] = { - "node": now_node, - "dependent": node["dependent"] - } - - if all([plugin_name, product_name]): - # find imageio overrides - get_imageio_node_override_setting( - now_node.Class(), - plugin_name, - product_name, - knobs - ) - - # add data to knob - set_node_knobs_from_settings(now_node, knobs, **kwargs) - - # switch actual node to previous - last_node = now_node - - for _node_name, node_prop in for_dependency.items(): - if not node_prop["dependent"]: - node_prop["node"].setInput( - 0, prev_node) - elif node_prop["dependent"] in for_dependency: - _prev_node = for_dependency[node_prop["dependent"]]["node"] - node_prop["node"].setInput( - 0, _prev_node) - else: - log.warning("Dependency has wrong name of node: {}".format( - node_prop - )) - - return last_node - - -def create_write_node( - name, - data, - input=None, - prenodes=None, - linked_knobs=None, - **kwargs -): - """Creating write node which is group node - - Arguments: - name (str): name of node - data (dict): creator write instance data - input (node)[optional]: selected node to connect to - prenodes (Optional[list[dict]]): nodes to be created before write - with dependency - review (bool)[optional]: adding review knob - farm (bool)[optional]: rendering workflow target - kwargs (dict)[optional]: additional key arguments for formatting - - Example: - prenodes = { - "nodeName": { - "nodeclass": "Reformat", - 
"dependent": [ - following_node_01, - ... - ], - "knobs": [ - { - "type": "text", - "name": "knobname", - "value": "knob value" - }, - ... - ] - }, - ... - } - - - Return: - node (nuke.Node): group node with avalon data as Knobs - """ - # Ensure name does not contain any invalid characters. - special_chars = re.escape("!@#$%^&*()=[]{}|\\;',.<>/?~+-") - special_chars_regex = re.compile(f"[{special_chars}]") - found_special_characters = list(special_chars_regex.findall(name)) - - msg = ( - f"Special characters found in name \"{name}\": " - f"{' '.join(found_special_characters)}" - ) - assert not found_special_characters, msg - - prenodes = prenodes or [] - - # filtering variables - plugin_name = data["creator"] - product_name = data["productName"] - - # get knob settings for write node - imageio_writes = get_imageio_node_setting( - node_class="Write", - plugin_name=plugin_name, - product_name=product_name - ) - - for knob in imageio_writes["knobs"]: - if knob["name"] == "file_type": - knot_type = knob["type"] - ext = knob[knot_type] - - data.update({ - "imageio_writes": imageio_writes, - "ext": ext - }) - anatomy_filled = format_anatomy(data) - - # build file path to workfiles - fdir = str( - anatomy_filled["work"]["default"]["directory"] - ).replace("\\", "/") - data["work"] = fdir - fpath = StringTemplate(data["fpath_template"]).format_strict(data) - - # create directory - if not os.path.isdir(os.path.dirname(fpath)): - log.warning("Path does not exist! I am creating it.") - os.makedirs(os.path.dirname(fpath)) - - GN = nuke.createNode("Group", "name {}".format(name)) - - prev_node = None - with GN: - if input: - input_name = str(input.name()).replace(" ", "") - # if connected input node was defined - prev_node = nuke.createNode( - "Input", - "name {}".format(input_name), - inpanel=False - ) - else: - # generic input node connected to nothing - prev_node = nuke.createNode( - "Input", - "name {}".format("rgba"), - inpanel=False - ) - - # creating pre-write nodes `prenodes` - last_prenode = create_prenodes( - prev_node, - prenodes, - plugin_name, - product_name, - **kwargs - ) - if last_prenode: - prev_node = last_prenode - - # creating write node - write_node = now_node = add_write_node( - "inside_{}".format(name), - fpath, - imageio_writes["knobs"], - **data - ) - # connect to previous node - now_node.setInput(0, prev_node) - - # switch actual node to previous - prev_node = now_node - - now_node = nuke.createNode("Output", "name Output1", inpanel=False) - - # connect to previous node - now_node.setInput(0, prev_node) - - # add divider - GN.addKnob(nuke.Text_Knob('', 'Rendering')) - - # Add linked knobs. 
- linked_knob_names = [] - - # add input linked knobs and create group only if any input - if linked_knobs: - linked_knob_names.append("_grp-start_") - linked_knob_names.extend(linked_knobs) - linked_knob_names.append("_grp-end_") - - linked_knob_names.append("Render") - - for _k_name in linked_knob_names: - if "_grp-start_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) - GN.addKnob(knob) - elif "_grp-end_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) - GN.addKnob(knob) - else: - if "___" in _k_name: - # add divider - GN.addKnob(nuke.Text_Knob("")) - else: - # add linked knob by _k_name - link = nuke.Link_Knob("") - link.makeLink(write_node.name(), _k_name) - link.setName(_k_name) - - # make render - if "Render" in _k_name: - link.setLabel("Render Local") - link.setFlag(0x1000) - GN.addKnob(link) - - # Adding render farm submission button. - if data.get("render_on_farm", False): - add_button_render_on_farm(GN) - - # adding write to read button - add_button_write_to_read(GN) - - # adding write to read button - add_button_clear_rendered(GN, os.path.dirname(fpath)) - - # set tile color - tile_color = next( - iter( - k[k["type"]] for k in imageio_writes["knobs"] - if "tile_color" in k["name"] - ), [255, 0, 0, 255] - ) - new_tile_color = [] - for c in tile_color: - if isinstance(c, float): - c = int(c * 255) - new_tile_color.append(c) - GN["tile_color"].setValue( - color_gui_to_int(new_tile_color)) - - return GN - - -def set_node_knobs_from_settings(node, knob_settings, **kwargs): - """Overriding knob values from settings - - Using `schema_nuke_knob_inputs` for knob type definitions. - - Args: - node (nuke.Node): nuke node - knob_settings (list): list of dict. 
Keys are `type`, `name`, `value` - kwargs (dict)[optional]: keys for formattable knob settings - """ - for knob in knob_settings: - knob_name = knob["name"] - if knob_name not in node.knobs(): - continue - - knob_type = knob["type"] - knob_value = knob[knob_type] - if knob_type == "expression": - node[knob_name].setExpression(knob_value) - continue - - # first deal with formattable knob settings - if knob_type == "formatable": - template = knob_value["template"] - to_type = knob_value["to_type"] - try: - knob_value = template.format(**kwargs) - except KeyError as msg: - raise KeyError( - "Not able to format expression: {}".format(msg)) - - # convert value to correct type - if to_type == "2d_vector": - knob_value = knob_value.split(";").split(",") - - knob_type = to_type - - if not knob_value: - continue - - knob_value = convert_knob_value_to_correct_type( - knob_type, knob_value) - - node[knob_name].setValue(knob_value) - - -def convert_knob_value_to_correct_type(knob_type, knob_value): - # Convert 'text' to string to avoid unicode - if knob_type == "text": - return str(knob_value) - - if knob_type == "boolean": - return bool(knob_value) - - if knob_type == "decimal_number": - return float(knob_value) - - if knob_type == "number": - return int(knob_value) - - if knob_type == "color_gui": - new_color = [] - for value in knob_value: - if isinstance(value, float): - value = int(value * 255) - new_color.append(value) - return color_gui_to_int(new_color) - - if knob_type == "box": - return [ - knob_value["x"], knob_value["y"], - knob_value["r"], knob_value["t"] - ] - - if knob_type == "vector_2d": - return [knob_value["x"], knob_value["y"]] - - if knob_type == "vector_3d": - return [knob_value["x"], knob_value["y"], knob_value["z"]] - - return knob_value - - -def color_gui_to_int(color_gui): - # Append alpha channel if not present - if len(color_gui) == 3: - color_gui = list(color_gui) + [255] - hex_value = ( - "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) - return int(hex_value, 16) - - -def create_backdrop(label="", color=None, layer=0, - nodes=None): - """Create Backdrop node - - Arguments: - color (str): nuke compatible string with color code - layer (int): layer of node usually used (self.pos_layer - 1) - label (str): the message - nodes (list): list of nodes to be wrapped into backdrop - - Returns: - nuke.Node: The created backdrop node. - - """ - assert isinstance(nodes, list), "`nodes` should be a list of nodes" - - # Calculate bounds for the backdrop node. - bdX = min([node.xpos() for node in nodes]) - bdY = min([node.ypos() for node in nodes]) - bdW = max([node.xpos() + node.screenWidth() for node in nodes]) - bdX - bdH = max([node.ypos() + node.screenHeight() for node in nodes]) - bdY - - # Expand the bounds to leave a little border. Elements are offsets - # for left, top, right and bottom edges respectively - left, top, right, bottom = (-20, -65, 20, 60) - bdX += left - bdY += top - bdW += (right - left) - bdH += (bottom - top) - - bdn = nuke.createNode("BackdropNode") - bdn["z_order"].setValue(layer) - - if color: - bdn["tile_color"].setValue(int(color, 16)) - - bdn["xpos"].setValue(bdX) - bdn["ypos"].setValue(bdY) - bdn["bdwidth"].setValue(bdW) - bdn["bdheight"].setValue(bdH) - - if label: - bdn["label"].setValue(label) - - bdn["note_font_size"].setValue(20) - return bdn - - -class WorkfileSettings(object): - """ - All settings for workfile will be set - - This object is setting all possible root settings to the workfile. 
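# Hedged usage sketch (editorial addition, not part of the original module):
# the knob settings consumed by `set_node_knobs_from_settings` are dicts whose
# value sits under a key named after the knob "type". The knob names below
# ("file_type", "tile_color") only illustrate the shape of the data.
from ayon_nuke.api import lib

knob_settings = [
    {"type": "text", "name": "file_type", "text": "exr"},
    {"type": "color_gui", "name": "tile_color", "color_gui": [0.2, 0.4, 0.8]},
]

# `convert_knob_value_to_correct_type` coerces raw settings values per type;
# float colors are scaled to 0-255 and packed into a single integer.
lib.convert_knob_value_to_correct_type("decimal_number", "0.5")  # -> 0.5
lib.color_gui_to_int([255, 0, 0])  # alpha appended -> 0xff0000ff == 4278190335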
- Including Colorspace, Frame ranges, Resolution format. It can set it - to Root node or to any given node. - - Arguments: - root (node): nuke's root node - nodes (list): list of nuke's nodes - nodes_filter (list): filtering classes for nodes - - """ - - def __init__(self, root_node=None, nodes=None, **kwargs): - project_entity = kwargs.get("project") - if project_entity is None: - project_name = get_current_project_name() - project_entity = ayon_api.get_project(project_name) - else: - project_name = project_entity["name"] - - Context._project_entity = project_entity - self._project_name = project_name - self._folder_path = get_current_folder_path() - self._task_name = get_current_task_name() - self._folder_entity = ayon_api.get_folder_by_path( - project_name, self._folder_path - ) - self._root_node = root_node or nuke.root() - self._nodes = self.get_nodes(nodes=nodes) - - context_data = get_template_data_with_names( - project_name, self._folder_path, self._task_name, "nuke" - ) - self.formatting_data = context_data - - def get_nodes(self, nodes=None, nodes_filter=None): - - if not isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in nuke.allNodes()] - elif not isinstance(nodes, list) and isinstance(nodes_filter, list): - nodes = list() - for filter in nodes_filter: - [nodes.append(n) for n in nuke.allNodes(filter=filter)] - return nodes - elif isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in self._nodes] - elif isinstance(nodes, list) and isinstance(nodes_filter, list): - for filter in nodes_filter: - return [n for n in self._nodes if filter in n.Class()] - - def set_viewers_colorspace(self, imageio_nuke): - """Adds correct colorspace to viewer - - Arguments: - imageio_nuke (dict): nuke colorspace configurations - - """ - filter_knobs = [ - "viewerProcess", - "wipe_position", - "monitorOutOutputTransform" - ] - viewer_process = self._display_and_view_formatted( - imageio_nuke["viewer"] - ) - output_transform = self._display_and_view_formatted( - imageio_nuke["monitor"] - ) - erased_viewers = [] - for v in nuke.allNodes(filter="Viewer"): - # set viewProcess to preset from settings - v["viewerProcess"].setValue(viewer_process) - - if viewer_process not in v["viewerProcess"].value(): - copy_inputs = v.dependencies() - copy_knobs = { - k: v[k].value() for k in v.knobs() - if k not in filter_knobs - } - - # delete viewer with wrong settings - erased_viewers.append(v["name"].value()) - nuke.delete(v) - - # create new viewer - nv = nuke.createNode("Viewer") - - # connect to original inputs - for i, n in enumerate(copy_inputs): - nv.setInput(i, n) - - # set copied knobs - for k, v in copy_knobs.items(): - nv[k].setValue(v) - - # set viewerProcess - nv["viewerProcess"].setValue(viewer_process) - nv["monitorOutOutputTransform"].setValue(output_transform) - - if erased_viewers: - log.warning( - "Attention! Viewer nodes {} were erased." 
- "It had wrong color profile".format(erased_viewers)) - - def _display_and_view_formatted(self, view_profile): - """ Format display and view profile string - - Args: - view_profile (dict): view and display profile - - Returns: - str: formatted display and view profile string - """ - display_view = create_viewer_profile_string( - view_profile["view"], view_profile["display"], path_like=False - ) - # format any template tokens used in the string - return StringTemplate(display_view).format_strict(self.formatting_data) - - def set_root_colorspace(self, imageio_host): - """Adds correct colorspace to root - - Arguments: - imageio_host (dict): host colorspace configurations - - """ - config_data = get_current_context_imageio_config_preset() - - workfile_settings = imageio_host["workfile"] - color_management = workfile_settings["color_management"] - native_ocio_config = workfile_settings["native_ocio_config"] - - if not config_data: - # no ocio config found and no custom path used - if self._root_node["colorManagement"].value() \ - not in color_management: - self._root_node["colorManagement"].setValue(color_management) - - # second set ocio version - if self._root_node["OCIO_config"].value() \ - not in native_ocio_config: - self._root_node["OCIO_config"].setValue(native_ocio_config) - - else: - # OCIO config path is defined from prelaunch hook - self._root_node["colorManagement"].setValue("OCIO") - - # print previous settings in case some were found in workfile - residual_path = self._root_node["customOCIOConfigPath"].value() - if residual_path: - log.info("Residual OCIO config path found: `{}`".format( - residual_path - )) - - # set ocio config path - if config_data: - config_path = config_data["path"].replace("\\", "/") - log.info("OCIO config path found: `{}`".format( - config_path)) - - # check if there's a mismatch between environment and settings - correct_settings = self._is_settings_matching_environment( - config_data) - - # if there's no mismatch between environment and settings - if correct_settings: - self._set_ocio_config_path_to_workfile(config_data) - - workfile_settings_output = {} - # get monitor lut from settings respecting Nuke version differences - monitor_lut_data = self._get_monitor_settings( - workfile_settings["monitor_out_lut"], - workfile_settings["monitor_lut"] - ) - workfile_settings_output.update(monitor_lut_data) - workfile_settings_output.update( - { - "workingSpaceLUT": workfile_settings["working_space"], - "int8Lut": workfile_settings["int_8_lut"], - "int16Lut": workfile_settings["int_16_lut"], - "logLut": workfile_settings["log_lut"], - "floatLut": workfile_settings["float_lut"], - } - ) - - # then set the rest - for knob, value_ in workfile_settings_output.items(): - # skip unfilled ocio config path - # it will be dict in value - if isinstance(value_, dict): - continue - # skip empty values - if not value_: - continue - self._root_node[knob].setValue(str(value_)) - - def _get_monitor_settings(self, viewer_lut, monitor_lut): - """ Get monitor settings from viewer and monitor lut - - Args: - viewer_lut (str): viewer lut string - monitor_lut (str): monitor lut string - - Returns: - dict: monitor settings - """ - output_data = {} - m_display, m_viewer = get_viewer_config_from_string(monitor_lut) - v_display, v_viewer = get_viewer_config_from_string(viewer_lut) - - # set monitor lut differently for nuke version 14 - if nuke.NUKE_VERSION_MAJOR >= 14: - output_data["monitorOutLUT"] = create_viewer_profile_string( - m_viewer, m_display, path_like=False) - # 
monitorLut=thumbnails - viewerProcess makes more sense - output_data["monitorLut"] = create_viewer_profile_string( - v_viewer, v_display, path_like=False) - - if nuke.NUKE_VERSION_MAJOR == 13: - output_data["monitorOutLUT"] = create_viewer_profile_string( - m_viewer, m_display, path_like=False) - # monitorLut=thumbnails - viewerProcess makes more sense - output_data["monitorLut"] = create_viewer_profile_string( - v_viewer, v_display, path_like=True) - if nuke.NUKE_VERSION_MAJOR <= 12: - output_data["monitorLut"] = create_viewer_profile_string( - m_viewer, m_display, path_like=True) - - return output_data - - def _is_settings_matching_environment(self, config_data): - """ Check if OCIO config path is different from environment - - Args: - config_data (dict): OCIO config data from settings - - Returns: - bool: True if settings are matching environment, False otherwise - """ - current_ocio_path = os.environ["OCIO"] - settings_ocio_path = config_data["path"] - - # normalize all paths to forward slashes - current_ocio_path = current_ocio_path.replace("\\", "/") - settings_ocio_path = settings_ocio_path.replace("\\", "/") - - if current_ocio_path != settings_ocio_path: - message = """ -It seems like there's a mismatch between the OCIO config path set in your Nuke -settings and the actual path set in your OCIO environment. - -To resolve this, please follow these steps: -1. Close Nuke if it's currently open. -2. Reopen Nuke. - -Please note the paths for your reference: - -- The OCIO environment path currently set: - `{env_path}` - -- The path in your current Nuke settings: - `{settings_path}` - -Reopening Nuke should synchronize these paths and resolve any discrepancies. -""" - nuke.message( - message.format( - env_path=current_ocio_path, - settings_path=settings_ocio_path - ) - ) - return False - - return True - - def _set_ocio_config_path_to_workfile(self, config_data): - """ Set OCIO config path to workfile - - Path set into nuke workfile. It is trying to replace path with - environment variable if possible. If not, it will set it as it is. - It also saves the script to apply the change, but only if it's not - empty Untitled script. 
- - Args: - config_data (dict): OCIO config data from settings - - """ - # replace path with env var if possible - ocio_path = self._replace_ocio_path_with_env_var(config_data) - - log.info("Setting OCIO config path to: `{}`".format( - ocio_path)) - - self._root_node["customOCIOConfigPath"].setValue( - ocio_path - ) - self._root_node["OCIO_config"].setValue("custom") - - # only save script if it's not empty - if self._root_node["name"].value() != "": - log.info("Saving script to apply OCIO config path change.") - nuke.scriptSave() - - def _get_included_vars(self, config_template): - """ Get all environment variables included in template - - Args: - config_template (str): OCIO config template from settings - - Returns: - list: list of environment variables included in template - """ - # resolve all environments for whitelist variables - included_vars = [ - "BUILTIN_OCIO_ROOT", - ] - - # include all project root related env vars - for env_var in os.environ: - if env_var.startswith("AYON_PROJECT_ROOT_"): - included_vars.append(env_var) - - # use regex to find env var in template with format {ENV_VAR} - # this way we make sure only template used env vars are included - env_var_regex = r"\{([A-Z0-9_]+)\}" - env_var = re.findall(env_var_regex, config_template) - if env_var: - included_vars.append(env_var[0]) - - return included_vars - - def _replace_ocio_path_with_env_var(self, config_data): - """ Replace OCIO config path with environment variable - - Environment variable is added as TCL expression to path. TCL expression - is also replacing backward slashes found in path for windows - formatted values. - - Args: - config_data (str): OCIO config dict from settings - - Returns: - str: OCIO config path with environment variable TCL expression - """ - config_path = config_data["path"].replace("\\", "/") - config_template = config_data["template"] - - included_vars = self._get_included_vars(config_template) - - # make sure we return original path if no env var is included - new_path = config_path - - for env_var in included_vars: - env_path = os.getenv(env_var) - if not env_path: - continue - - # it has to be directory current process can see - if not os.path.isdir(env_path): - continue - - # make sure paths are in same format - env_path = env_path.replace("\\", "/") - path = config_path.replace("\\", "/") - - # check if env_path is in path and replace to first found positive - if env_path in path: - # with regsub we make sure path format of slashes is correct - resub_expr = ( - "[regsub -all {{\\\\}} [getenv {}] \"/\"]").format(env_var) - - new_path = path.replace( - env_path, resub_expr - ) - break - - return new_path - - def set_writes_colorspace(self): - """ Adds correct colorspace to write node dict - """ - for node in nuke.allNodes(filter="Group", group=self._root_node): - log.info("Setting colorspace to `{}`".format(node.name())) - - # get data from avalon knob - avalon_knob_data = read_avalon_data(node) - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - - if ( - # backward compatibility - # TODO: remove this once old avalon data api will be removed - avalon_knob_data - and avalon_knob_data.get("id") not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - } - ): - continue - elif ( - node_data - and node_data.get("id") not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - } - ): - continue - - if ( - # backward compatibility - # TODO: remove this once old avalon data api will be removed - avalon_knob_data - and "creator" not in avalon_knob_data - ): - continue - elif ( - node_data - and 
"creator_identifier" not in node_data - ): - continue - - nuke_imageio_writes = None - if avalon_knob_data: - # establish families - product_type = avalon_knob_data.get("productType") - if product_type is None: - product_type = avalon_knob_data["family"] - families = [product_type] - if avalon_knob_data.get("families"): - families.append(avalon_knob_data.get("families")) - - nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["families"], - plugin_name=avalon_knob_data["creator"], - product_name=avalon_knob_data["productName"] - ) - elif node_data: - nuke_imageio_writes = get_write_node_template_attr(node) - - if not nuke_imageio_writes: - return - - write_node = None - - # get into the group node - node.begin() - for x in nuke.allNodes(): - if x.Class() == "Write": - write_node = x - node.end() - - if not write_node: - return - - set_node_knobs_from_settings( - write_node, nuke_imageio_writes["knobs"]) - - def set_reads_colorspace(self, read_clrs_inputs): - """ Setting colorspace to Read nodes - - Looping through all read nodes and tries to set colorspace based - on regex rules in presets - """ - changes = {} - for n in nuke.allNodes(): - file = nuke.filename(n) - if n.Class() != "Read": - continue - - # check if any colorspace presets for read is matching - preset_clrsp = None - - for input in read_clrs_inputs: - if not bool(re.search(input["regex"], file)): - continue - preset_clrsp = input["colorspace"] - - if preset_clrsp is not None: - current = n["colorspace"].value() - future = str(preset_clrsp) - if current != future: - changes[n.name()] = { - "from": current, - "to": future - } - - if changes: - msg = "Read nodes are not set to correct colorspace:\n\n" - for nname, knobs in changes.items(): - msg += ( - " - node: '{0}' is now '{1}' but should be '{2}'\n" - ).format(nname, knobs["from"], knobs["to"]) - - msg += "\nWould you like to change it?" 
- - if nuke.ask(msg): - for nname, knobs in changes.items(): - n = nuke.toNode(nname) - n["colorspace"].setValue(knobs["to"]) - log.info( - "Setting `{0}` to `{1}`".format( - nname, - knobs["to"])) - - def set_colorspace(self): - """ Setting colorspace following presets - """ - # get imageio - nuke_colorspace = get_nuke_imageio_settings() - - log.info("Setting colorspace to workfile...") - try: - self.set_root_colorspace(nuke_colorspace) - except AttributeError as _error: - msg = "Set Colorspace to workfile error: {}".format(_error) - nuke.message(msg) - - log.info("Setting colorspace to viewers...") - try: - self.set_viewers_colorspace(nuke_colorspace) - except AttributeError as _error: - msg = "Set Colorspace to viewer error: {}".format(_error) - nuke.message(msg) - - log.info("Setting colorspace to write nodes...") - try: - self.set_writes_colorspace() - except AttributeError as _error: - nuke.message(_error) - log.error(_error) - - log.info("Setting colorspace to read nodes...") - read_clrs_inputs = nuke_colorspace["regex_inputs"].get("inputs", []) - if read_clrs_inputs: - self.set_reads_colorspace(read_clrs_inputs) - - def reset_frame_range_handles(self): - """Set frame range to current folder.""" - - if "attrib" not in self._folder_entity: - msg = "Folder {} don't have set any 'attrib'".format( - self._folder_path - ) - log.warning(msg) - nuke.message(msg) - return - - folder_attributes = self._folder_entity["attrib"] - - missing_cols = [] - check_cols = ["fps", "frameStart", "frameEnd", - "handleStart", "handleEnd"] - - for col in check_cols: - if col not in folder_attributes: - missing_cols.append(col) - - if len(missing_cols) > 0: - missing = ", ".join(missing_cols) - msg = "'{}' are not set for folder '{}'!".format( - missing, self._folder_path) - log.warning(msg) - nuke.message(msg) - return - - # get handles values - handle_start = folder_attributes["handleStart"] - handle_end = folder_attributes["handleEnd"] - frame_start = folder_attributes["frameStart"] - frame_end = folder_attributes["frameEnd"] - - fps = float(folder_attributes["fps"]) - frame_start_handle = frame_start - handle_start - frame_end_handle = frame_end + handle_end - - self._root_node["lock_range"].setValue(False) - self._root_node["fps"].setValue(fps) - self._root_node["first_frame"].setValue(frame_start_handle) - self._root_node["last_frame"].setValue(frame_end_handle) - self._root_node["lock_range"].setValue(True) - - # update node graph so knobs are updated - update_node_graph() - - frame_range = '{0}-{1}'.format(frame_start, frame_end) - - for node in nuke.allNodes(filter="Viewer"): - node['frame_range'].setValue(frame_range) - node['frame_range_lock'].setValue(True) - node['frame_range'].setValue(frame_range) - node['frame_range_lock'].setValue(True) - - if not ASSIST: - set_node_data( - self._root_node, - INSTANCE_DATA_KNOB, - { - "handleStart": int(handle_start), - "handleEnd": int(handle_end) - } - ) - else: - log.warning( - "NukeAssist mode is not allowing " - "updating custom knobs..." - ) - - def reset_resolution(self): - """Set resolution to project resolution.""" - log.info("Resetting resolution") - project_name = get_current_project_name() - folder_attributes = self._folder_entity["attrib"] - - format_data = { - "width": folder_attributes["resolutionWidth"], - "height": folder_attributes["resolutionHeight"], - "pixel_aspect": folder_attributes["pixelAspect"], - "name": project_name - } - - if any(x_ for x_ in format_data.values() if x_ is None): - msg = ("Missing set shot attributes in DB." 
- "\nContact your supervisor!." - "\n\nWidth: `{width}`" - "\nHeight: `{height}`" - "\nPixel Aspect: `{pixel_aspect}`").format(**format_data) - log.error(msg) - nuke.message(msg) - - existing_format = None - for format in nuke.formats(): - if format_data["name"] == format.name(): - existing_format = format - break - - if existing_format: - # Enforce existing format to be correct. - existing_format.setWidth(format_data["width"]) - existing_format.setHeight(format_data["height"]) - existing_format.setPixelAspect(format_data["pixel_aspect"]) - else: - format_string = self.make_format_string(**format_data) - log.info("Creating new format: {}".format(format_string)) - nuke.addFormat(format_string) - - nuke.root()["format"].setValue(format_data["name"]) - log.info("Format is set.") - - # update node graph so knobs are updated - update_node_graph() - - def make_format_string(self, **kwargs): - if kwargs.get("r"): - return ( - "{width} " - "{height} " - "{x} " - "{y} " - "{r} " - "{t} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - else: - return ( - "{width} " - "{height} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - - def set_context_settings(self): - # replace reset resolution from avalon core to pype's - self.reset_resolution() - # replace reset resolution from avalon core to pype's - self.reset_frame_range_handles() - # add colorspace menu item - self.set_colorspace() - - def set_favorites(self): - from .utils import set_context_favorites - - work_dir = os.getenv("AYON_WORKDIR") - # TODO validate functionality - # - does expect the structure is '{root}/{project}/{folder}' - # - this used asset name expecting it is unique in project - folder_path = get_current_folder_path() - folder_name = folder_path.split("/")[-1] - favorite_items = OrderedDict() - - # project - # get project's root and split to parts - projects_root = os.path.normpath(work_dir.split( - Context.project_name)[0]) - # add project name - project_dir = os.path.join(projects_root, Context.project_name) + "/" - # add to favorites - favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) - - # folder - folder_root = os.path.normpath(work_dir.split( - folder_name)[0]) - # add folder name - folder_dir = os.path.join(folder_root, folder_name) + "/" - # add to favorites - favorite_items.update({"Shot dir": folder_dir.replace("\\", "/")}) - - # workdir - favorite_items.update({"Work dir": work_dir.replace("\\", "/")}) - - set_context_favorites(favorite_items) - - -def get_write_node_template_attr(node): - """ Gets all defined data from presets - """ - - # TODO: add identifiers to settings and rename settings key - plugin_names_mapping = { - "create_write_image": "CreateWriteImage", - "create_write_prerender": "CreateWritePrerender", - "create_write_render": "CreateWriteRender" - } - # get avalon data from node - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - identifier = node_data["creator_identifier"] - - # return template data - product_name = node_data.get("productName") - if product_name is None: - product_name = node_data["subset"] - return get_imageio_node_setting( - node_class="Write", - plugin_name=plugin_names_mapping[identifier], - product_name=product_name - ) - - -def get_dependent_nodes(nodes): - """Get all dependent nodes connected to the list of nodes. - - Looking for connections outside of the nodes in incoming argument. 
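# Hedged usage sketch (editorial addition): `WorkfileSettings.make_format_string`
# defined above builds the space separated string Nuke expects for a format
# definition. The resolution values below are illustrative, not from settings.
import nuke
from ayon_nuke.api.lib import WorkfileSettings

fmt = WorkfileSettings().make_format_string(
    width=1920, height=1080, pixel_aspect=1.0, name="myproject"
)
# fmt == "1920 1080 1.00 myproject"
nuke.addFormat(fmt)
nuke.root()["format"].setValue("myproject")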
- - Arguments: - nodes (list): list of nuke.Node objects - - Returns: - connections_in: dictionary of nodes and its dependencies - connections_out: dictionary of nodes and its dependency - """ - - connections_in = dict() - connections_out = dict() - node_names = [n.name() for n in nodes] - for node in nodes: - inputs = node.dependencies() - outputs = node.dependent() - # collect all inputs outside - test_in = [(i, n) for i, n in enumerate(inputs) - if n.name() not in node_names] - if test_in: - connections_in.update({ - node: test_in - }) - # collect all outputs outside - test_out = [i for i in outputs if i.name() not in node_names] - if test_out: - # only one dependent node is allowed - connections_out.update({ - node: test_out[-1] - }) - - return connections_in, connections_out - - -def update_node_graph(): - # Resetting frame will update knob values - try: - root_node_lock = nuke.root()["lock_range"].value() - nuke.root()["lock_range"].setValue(not root_node_lock) - nuke.root()["lock_range"].setValue(root_node_lock) - - current_frame = nuke.frame() - nuke.frame(1) - nuke.frame(int(current_frame)) - except Exception as error: - log.warning(error) - - -def find_free_space_to_paste_nodes( - nodes, - group=nuke.root(), - direction="right", - offset=300 -): - """ - For getting coordinates in DAG (node graph) for placing new nodes - - Arguments: - nodes (list): list of nuke.Node objects - group (nuke.Node) [optional]: object in which context it is - direction (str) [optional]: where we want it to be placed - [left, right, top, bottom] - offset (int) [optional]: what offset it is from rest of nodes - - Returns: - xpos (int): x coordinace in DAG - ypos (int): y coordinace in DAG - """ - if len(nodes) == 0: - return 0, 0 - - group_xpos = list() - group_ypos = list() - - # get local coordinates of all nodes - nodes_xpos = [n.xpos() for n in nodes] + \ - [n.xpos() + n.screenWidth() for n in nodes] - - nodes_ypos = [n.ypos() for n in nodes] + \ - [n.ypos() + n.screenHeight() for n in nodes] - - # get complete screen size of all nodes to be placed in - nodes_screen_width = max(nodes_xpos) - min(nodes_xpos) - nodes_screen_heigth = max(nodes_ypos) - min(nodes_ypos) - - # get screen size (r,l,t,b) of all nodes in `group` - with group: - group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \ - [n.xpos() + n.screenWidth() for n in nuke.allNodes() - if n not in nodes] - group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \ - [n.ypos() + n.screenHeight() for n in nuke.allNodes() - if n not in nodes] - - # calc output left - if direction in "left": - xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset) - ypos = min(group_ypos) - return xpos, ypos - # calc output right - if direction in "right": - xpos = max(group_xpos) + abs(offset) - ypos = min(group_ypos) - return xpos, ypos - # calc output top - if direction in "top": - xpos = min(group_xpos) - ypos = min(group_ypos) - abs(nodes_screen_heigth) - abs(offset) - return xpos, ypos - # calc output bottom - if direction in "bottom": - xpos = min(group_xpos) - ypos = max(group_ypos) + abs(offset) - return xpos, ypos - - -@contextlib.contextmanager -def maintained_selection(exclude_nodes=None): - """Maintain selection during context - - Maintain selection during context and unselect - all nodes after context is done. - - Arguments: - exclude_nodes (list[nuke.Node]): list of nodes to be unselected - before context is done - - Example: - >>> with maintained_selection(): - ... 
node["selected"].setValue(True) - >>> print(node["selected"].value()) - False - """ - if exclude_nodes: - for node in exclude_nodes: - node["selected"].setValue(False) - - previous_selection = nuke.selectedNodes() - - try: - yield - finally: - # unselect all selection in case there is some - reset_selection() - - # and select all previously selected nodes - if previous_selection: - select_nodes(previous_selection) - - -@contextlib.contextmanager -def swap_node_with_dependency(old_node, new_node): - """ Swap node with dependency - - Swap node with dependency and reconnect all inputs and outputs. - It removes old node. - - Arguments: - old_node (nuke.Node): node to be replaced - new_node (nuke.Node): node to replace with - - Example: - >>> old_node_name = old_node["name"].value() - >>> print(old_node_name) - old_node_name_01 - >>> with swap_node_with_dependency(old_node, new_node) as node_name: - ... new_node["name"].setValue(node_name) - >>> print(new_node["name"].value()) - old_node_name_01 - """ - # preserve position - xpos, ypos = old_node.xpos(), old_node.ypos() - # preserve selection after all is done - outputs = get_node_outputs(old_node) - inputs = old_node.dependencies() - node_name = old_node["name"].value() - - try: - nuke.delete(old_node) - - yield node_name - finally: - - # Reconnect inputs - for i, node in enumerate(inputs): - new_node.setInput(i, node) - # Reconnect outputs - if outputs: - for n, pipes in outputs.items(): - for i in pipes: - n.setInput(i, new_node) - # return to original position - new_node.setXYpos(xpos, ypos) - - -def reset_selection(): - """Deselect all selected nodes""" - for node in nuke.selectedNodes(): - node["selected"].setValue(False) - - -def select_nodes(nodes): - """Selects all inputted nodes - - Arguments: - nodes (Union[list, tuple, set]): nuke nodes to be selected - """ - assert isinstance(nodes, (list, tuple, set)), \ - "nodes has to be list, tuple or set" - - for node in nodes: - node["selected"].setValue(True) - - -def launch_workfiles_app(): - """Show workfiles tool on nuke launch. - - Trigger to show workfiles tool on application launch. Can be executed only - once all other calls are ignored. - - Workfiles tool show is deferred after application initialization using - QTimer. - """ - - if Context.workfiles_launched: - return - - Context.workfiles_launched = True - - # get all important settings - open_at_start = env_value_to_bool( - env_key="AYON_WORKFILE_TOOL_ON_START", - default=None) - - # return if none is defined - if not open_at_start: - return - - # Show workfiles tool using timer - # - this will be probably triggered during initialization in that case - # the application is not be able to show uis so it must be - # deferred using timer - # - timer should be processed when initialization ends - # When applications starts to process events. - timer = QtCore.QTimer() - timer.timeout.connect(_launch_workfile_app) - timer.setInterval(100) - Context.workfiles_tool_timer = timer - timer.start() - - -def _launch_workfile_app(): - # Safeguard to not show window when application is still starting up - # or is already closing down. 
- closing_down = QtWidgets.QApplication.closingDown() - starting_up = QtWidgets.QApplication.startingUp() - - # Stop the timer if application finished start up of is closing down - if closing_down or not starting_up: - Context.workfiles_tool_timer.stop() - Context.workfiles_tool_timer = None - - # Skip if application is starting up or closing down - if starting_up or closing_down: - return - - # Make sure on top is enabled on first show so the window is not hidden - # under main nuke window - # - this happened on Centos 7 and it is because the focus of nuke - # changes to the main window after showing because of initialization - # which moves workfiles tool under it - host_tools.show_workfiles(parent=None, on_top=True) - - -@deprecated("ayon_nuke.api.lib.start_workfile_template_builder") -def process_workfile_builder(): - """ [DEPRECATED] Process workfile builder on nuke start - - This function is deprecated and will be removed in future versions. - Use settings for `project_settings/nuke/templated_workfile_build` which are - supported by api `start_workfile_template_builder()`. - """ - - # to avoid looping of the callback, remove it! - nuke.removeOnCreate(process_workfile_builder, nodeClass="Root") - - # get state from settings - project_settings = get_current_project_settings() - workfile_builder = project_settings["nuke"].get( - "workfile_builder", {}) - - # get settings - create_fv_on = workfile_builder.get("create_first_version") or None - builder_on = workfile_builder.get("builder_on_start") or None - - last_workfile_path = os.environ.get("AYON_LAST_WORKFILE") - - # generate first version in file not existing and feature is enabled - if create_fv_on and not os.path.exists(last_workfile_path): - # get custom template path if any - custom_template_path = get_current_context_custom_workfile_template( - project_settings=project_settings - ) - - # if custom template is defined - if custom_template_path: - log.info("Adding nodes from `{}`...".format( - custom_template_path - )) - try: - # import nodes into current script - nuke.nodePaste(custom_template_path) - except RuntimeError: - raise RuntimeError(( - "Template defined for project: {} is not working. " - "Talk to your manager for an advise").format( - custom_template_path)) - - # if builder at start is defined - if builder_on: - log.info("Building nodes from presets...") - # build nodes by defined presets - BuildWorkfile().process() - - log.info("Saving script as version `{}`...".format( - last_workfile_path - )) - # safe file as version - save_file(last_workfile_path) - return - - -def start_workfile_template_builder(): - from .workfile_template_builder import ( - build_workfile_template - ) - - # remove callback since it would be duplicating the workfile - nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root") - - # to avoid looping of the callback, remove it! - log.info("Starting workfile template builder...") - try: - build_workfile_template(workfile_creation_enabled=True) - except TemplateProfileNotFound: - log.warning("Template profile not found. Skipping...") - - -def add_scripts_menu(): - try: - from scriptsmenu import launchfornuke - except ImportError: - log.warning( - "Skipping studio.menu install, because " - "'scriptsmenu' module seems unavailable." 
-        )
-        return
-
-    # load configuration of custom menu
-    project_name = get_current_project_name()
-    project_settings = get_project_settings(project_name)
-    config = project_settings["nuke"]["scriptsmenu"]["definition"]
-    _menu = project_settings["nuke"]["scriptsmenu"]["name"]
-
-    if not config:
-        log.warning("Skipping studio menu, no definition found.")
-        return
-
-    # run the launcher for the Nuke menu
-    studio_menu = launchfornuke.main(title=_menu.title())
-
-    # apply configuration
-    studio_menu.build_from_configuration(studio_menu, config)
-
-
-def add_scripts_gizmo():
-
-    # load configuration of custom menu
-    project_name = get_current_project_name()
-    project_settings = get_project_settings(project_name)
-    platform_name = platform.system().lower()
-
-    for gizmo_settings in project_settings["nuke"]["gizmo"]:
-        gizmo_list_definition = gizmo_settings["gizmo_definition"]
-        toolbar_name = gizmo_settings["toolbar_menu_name"]
-        # gizmo_toolbar_path = gizmo_settings["gizmo_toolbar_path"]
-        gizmo_source_dir = gizmo_settings.get(
-            "gizmo_source_dir", {}).get(platform_name)
-        toolbar_icon_path = gizmo_settings.get(
-            "toolbar_icon_path", {}).get(platform_name)
-
-        if not gizmo_source_dir:
-            log.debug("Skipping studio gizmo `{}`, "
-                      "no gizmo path found.".format(toolbar_name)
-                      )
-            return
-
-        if not gizmo_list_definition:
-            log.debug("Skipping studio gizmo `{}`, "
-                      "no definition found.".format(toolbar_name)
-                      )
-            return
-
-        if toolbar_icon_path:
-            try:
-                toolbar_icon_path = toolbar_icon_path.format(**os.environ)
-            except KeyError as e:
-                log.error(
-                    "This environment variable doesn't exist: {}".format(e)
-                )
-
-        existing_gizmo_path = []
-        for source_dir in gizmo_source_dir:
-            try:
-                resolve_source_dir = source_dir.format(**os.environ)
-            except KeyError as e:
-                log.error(
-                    "This environment variable doesn't exist: {}".format(e)
-                )
-                continue
-            if not os.path.exists(resolve_source_dir):
-                log.warning(
-                    "The source of gizmo `{}` does not exist".format(
-                        resolve_source_dir
-                    )
-                )
-                continue
-            existing_gizmo_path.append(resolve_source_dir)
-
-        # run the launcher for Nuke toolbar
-        toolbar_menu = gizmo_menu.GizmoMenu(
-            title=toolbar_name,
-            icon=toolbar_icon_path
-        )
-
-        # apply configuration
-        toolbar_menu.add_gizmo_path(existing_gizmo_path)
-        toolbar_menu.build_from_configuration(gizmo_list_definition)
-
-
-class NukeDirmap(HostDirmap):
-    def __init__(self, file_name, *args, **kwargs):
-        """
-        Args:
-            file_name (str): full path of referenced file from workfiles
-            *args (tuple): Positional arguments for 'HostDirmap' class
-            **kwargs (dict): Keyword arguments for 'HostDirmap' class
-        """
-
-        self.file_name = file_name
-        super(NukeDirmap, self).__init__(*args, **kwargs)
-
-    def on_enable_dirmap(self):
-        pass
-
-    def dirmap_routine(self, source_path, destination_path):
-        source_path = source_path.lower().replace(os.sep, '/')
-        destination_path = destination_path.lower().replace(os.sep, '/')
-        if platform.system().lower() == "windows":
-            self.file_name = self.file_name.lower().replace(
-                source_path, destination_path)
-        else:
-            self.file_name = self.file_name.replace(
-                source_path, destination_path)
-
-
-class DirmapCache:
-    """Caching class to get settings and sitesync easily and only once."""
-    _project_name = None
-    _project_settings = None
-    _sitesync_addon_discovered = False
-    _sitesync_addon = None
-    _mapping = None
-
-    @classmethod
-    def project_name(cls):
-        if cls._project_name is None:
-            cls._project_name = os.getenv("AYON_PROJECT_NAME")
-        return cls._project_name
-
-    @classmethod
-    def project_settings(cls):
-        if cls._project_settings is None:
-            cls._project_settings = get_project_settings(cls.project_name())
-        return cls._project_settings
-
-    @classmethod
-    def sitesync_addon(cls):
-        if not cls._sitesync_addon_discovered:
-            cls._sitesync_addon_discovered = True
-            cls._sitesync_addon = AddonsManager().get("sitesync")
-        return cls._sitesync_addon
-
-    @classmethod
-    def mapping(cls):
-        return cls._mapping
-
-    @classmethod
-    def set_mapping(cls, mapping):
-        cls._mapping = mapping
-
-
-def dirmap_file_name_filter(file_name):
-    """Nuke callback function with a single full path argument.
-
-    Checks project settings for a potential mapping from source to
-    destination.
-    """
-
-    dirmap_processor = NukeDirmap(
-        file_name,
-        "nuke",
-        DirmapCache.project_name(),
-        DirmapCache.project_settings(),
-        DirmapCache.sitesync_addon(),
-    )
-    if not DirmapCache.mapping():
-        DirmapCache.set_mapping(dirmap_processor.get_mappings())
-
-    dirmap_processor.process_dirmap(DirmapCache.mapping())
-    if os.path.exists(dirmap_processor.file_name):
-        return dirmap_processor.file_name
-    return file_name
-
-
-@contextlib.contextmanager
-def node_tempfile():
-    """Create a temp file where a node is pasted during duplication.
-
-    This is to avoid using the clipboard for node duplication.
-    """
-
-    tmp_file = tempfile.NamedTemporaryFile(
-        mode="w", prefix="openpype_nuke_temp_", suffix=".nk", delete=False
-    )
-    tmp_file.close()
-    node_tempfile_path = tmp_file.name
-
-    try:
-        # Yield the path where node can be copied
-        yield node_tempfile_path
-
-    finally:
-        # Remove the file at the end
-        os.remove(node_tempfile_path)
-
-
-def duplicate_node(node):
-    reset_selection()
-
-    # select required node for duplication
-    node.setSelected(True)
-
-    with node_tempfile() as filepath:
-        # copy selected to temp filepath
-        nuke.nodeCopy(filepath)
-
-        # reset selection
-        reset_selection()
-
-        # paste node and selection is on it only
-        dupli_node = nuke.nodePaste(filepath)
-
-        # reset selection
-        reset_selection()
-
-    return dupli_node
-
-
-def get_group_io_nodes(nodes):
-    """Get the input and the output of a group of nodes."""
-
-    if not nodes:
-        raise ValueError("There are no nodes in the list")
-
-    input_node = None
-    output_node = None
-
-    if len(nodes) == 1:
-        input_node = output_node = nodes[0]
-
-    else:
-        for node in nodes:
-            if "Input" in node.name():
-                input_node = node
-
-            if "Output" in node.name():
-                output_node = node
-
-            if input_node is not None and output_node is not None:
-                break
-
-        if input_node is None:
-            log.warning("No Input found")
-
-        if output_node is None:
-            log.warning("No Output found")
-
-    return input_node, output_node
-
-
-def get_extreme_positions(nodes):
-    """Get the 4 numbers that represent the bounding box of a group of nodes."""
-
-    if not nodes:
-        raise ValueError("There are no nodes in the list")
-
-    nodes_xpos = [n.xpos() for n in nodes] + \
-        [n.xpos() + n.screenWidth() for n in nodes]
-
-    nodes_ypos = [n.ypos() for n in nodes] + \
-        [n.ypos() + n.screenHeight() for n in nodes]
-
-    min_x, min_y = (min(nodes_xpos), min(nodes_ypos))
-    max_x, max_y = (max(nodes_xpos), max(nodes_ypos))
-    return min_x, min_y, max_x, max_y
-
-
-def refresh_node(node):
-    """Correct a bug caused by the multi-threading of Nuke.
-
-    Refresh the node to make sure that it takes the desired attributes.
-    """
-
-    x = node.xpos()
-    y = node.ypos()
-    nuke.autoplaceSnap(node)
-    node.setXYpos(x, y)
-
-
-def refresh_nodes(nodes):
-    for node in nodes:
-        refresh_node(node)
-
-
-def get_names_from_nodes(nodes):
-    """Get list of node names.
-
-    Args:
-        nodes (List[nuke.Node]): List of nodes to convert into names.
-
-    Returns:
-        List[str]: Names of the passed nodes.
-    """
-
-    return [
-        node.name()
-        for node in nodes
-    ]
-
-
-def get_nodes_by_names(names):
-    """Get list of nuke nodes based on their names.
-
-    Args:
-        names (List[str]): List of node names to be found.
-
-    Returns:
-        List[nuke.Node]: List of nodes found by name.
-    """
-
-    return [
-        nuke.toNode(name)
-        for name in names
-    ]
-
-
-def get_viewer_config_from_string(input_string):
-    """Convert string to display and viewer string.
-
-    Args:
-        input_string (str): string with viewer
-
-    Raises:
-        IndexError: if there is more than one slash in the input string
-        IndexError: if the closing bracket is missing
-
-    Returns:
-        tuple[str]: display, viewer
-    """
-    display = None
-    viewer = input_string
-    # check if () or / or \ in name
-    if "/" in viewer:
-        split = viewer.split("/")
-
-        # raise if there is more than one slash
-        if len(split) > 2:
-            raise IndexError((
-                "Viewer Input string is not correct. "
-                "More than one `/` slash! {}"
-            ).format(input_string))
-
-        viewer = split[1]
-        display = split[0]
-    elif "(" in viewer:
-        pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
-        result_ = re.findall(pattern, viewer)
-        try:
-            result_ = result_.pop()
-            display = str(result_[1]).rstrip()
-            viewer = str(result_[0]).rstrip()
-        except IndexError:
-            raise IndexError((
-                "Viewer Input string is not correct. "
-                "Missing bracket! {}"
-            ).format(input_string))
-
-    return (display, viewer)
-
-
-def create_viewer_profile_string(viewer, display=None, path_like=False):
-    """Convert viewer and display to string.
-
-    Args:
-        viewer (str): viewer name
-        display (Optional[str]): display name
-        path_like (Optional[bool]): if True, return path like string
-
-    Returns:
-        str: viewer config string
-    """
-    if not display:
-        return viewer
-
-    if path_like:
-        return "{}/{}".format(display, viewer)
-    return "{} ({})".format(viewer, display)
-
-
-def get_filenames_without_hash(filename, frame_start, frame_end):
-    """Get filenames without frame hash
-    i.e. 
"renderCompositingMain.baking.0001.exr" - - Args: - filename (str): filename with frame hash - frame_start (str): start of the frame - frame_end (str): end of the frame - - Returns: - list: filename per frame of the sequence - """ - filenames = [] - for frame in range(int(frame_start), (int(frame_end) + 1)): - if "#" in filename: - # use regex to convert #### to {:0>4} - def replace(match): - return "{{:0>{}}}".format(len(match.group())) - filename_without_hashes = re.sub("#+", replace, filename) - new_filename = filename_without_hashes.format(frame) - filenames.append(new_filename) - return filenames - - -def create_camera_node_by_version(): - """Function to create the camera with the latest node class - For Nuke version 14.0 or later, the Camera4 camera node class - would be used - For the version before, the Camera2 camera node class - would be used - Returns: - Node: camera node - """ - nuke_number_version = nuke.NUKE_VERSION_MAJOR - if nuke_number_version >= 14: - return nuke.createNode("Camera4") - else: - return nuke.createNode("Camera2") - - -def link_knobs(knobs, node, group_node): - """Link knobs from inside `group_node`""" - - missing_knobs = [] - for knob in knobs: - if knob in group_node.knobs(): - continue - - if knob not in node.knobs().keys(): - missing_knobs.append(knob) - - link = nuke.Link_Knob("") - link.makeLink(node.name(), knob) - link.setName(knob) - link.setFlag(0x1000) - group_node.addKnob(link) - - if missing_knobs: - raise ValueError( - "Write node exposed knobs missing:\n\n{}\n\nPlease review" - " project settings.".format("\n".join(missing_knobs)) - ) diff --git a/server_addon/nuke/client/ayon_nuke/api/pipeline.py b/server_addon/nuke/client/ayon_nuke/api/pipeline.py deleted file mode 100644 index 2ba430c272..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/pipeline.py +++ /dev/null @@ -1,641 +0,0 @@ -import nuke - -import os -import importlib -from collections import OrderedDict, defaultdict - -import pyblish.api - -from ayon_core.host import ( - HostBase, - IWorkfileHost, - ILoadHost, - IPublishHost -) -from ayon_core.settings import get_current_project_settings -from ayon_core.lib import register_event_callback, Logger -from ayon_core.pipeline import ( - register_loader_plugin_path, - register_creator_plugin_path, - register_inventory_action_path, - register_workfile_build_plugin_path, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, - AVALON_CONTAINER_ID, - get_current_folder_path, - get_current_task_name, - registered_host, -) -from ayon_core.pipeline.workfile import BuildWorkfile -from ayon_core.tools.utils import host_tools -from ayon_nuke import NUKE_ROOT_DIR -from ayon_core.tools.workfile_template_build import open_template_ui - -from .lib import ( - Context, - ROOT_DATA_KNOB, - INSTANCE_DATA_KNOB, - get_main_window, - WorkfileSettings, - start_workfile_template_builder, - launch_workfiles_app, - check_inventory_versions, - set_avalon_knob_data, - read_avalon_data, - on_script_load, - dirmap_file_name_filter, - add_scripts_menu, - add_scripts_gizmo, - get_node_data, - set_node_data, - MENU_LABEL, -) -from .workfile_template_builder import ( - build_workfile_template, - create_placeholder, - update_placeholder, - NukeTemplateBuilder, -) -from .workio import ( - open_file, - save_file, - file_extensions, - has_unsaved_changes, - work_root, - current_file -) -from .constants import ASSIST -from . 
import push_to_project - -log = Logger.get_logger(__name__) - -PLUGINS_DIR = os.path.join(NUKE_ROOT_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") - -# registering pyblish gui regarding settings in presets -if os.getenv("PYBLISH_GUI", None): - pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) - - -class NukeHost( - HostBase, IWorkfileHost, ILoadHost, IPublishHost -): - name = "nuke" - - def open_workfile(self, filepath): - return open_file(filepath) - - def save_workfile(self, filepath=None): - return save_file(filepath) - - def work_root(self, session): - return work_root(session) - - def get_current_workfile(self): - return current_file() - - def workfile_has_unsaved_changes(self): - return has_unsaved_changes() - - def get_workfile_extensions(self): - return file_extensions() - - def get_containers(self): - return ls() - - def install(self): - """Installing all requirements for Nuke host""" - - pyblish.api.register_host("nuke") - - self.log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - register_inventory_action_path(INVENTORY_PATH) - register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) - - # Register AYON event for workfiles loading. - register_event_callback("workio.open_file", check_inventory_versions) - register_event_callback("taskChanged", change_context_label) - - _install_menu() - - # add script menu - add_scripts_menu() - add_scripts_gizmo() - - add_nuke_callbacks() - - launch_workfiles_app() - - def get_context_data(self): - root_node = nuke.root() - return get_node_data(root_node, ROOT_DATA_KNOB) - - def update_context_data(self, data, changes): - root_node = nuke.root() - set_node_data(root_node, ROOT_DATA_KNOB, data) - - -def add_nuke_callbacks(): - """ Adding all available nuke callbacks - """ - nuke_settings = get_current_project_settings()["nuke"] - workfile_settings = WorkfileSettings() - - # Set context settings. - nuke.addOnCreate( - workfile_settings.set_context_settings, nodeClass="Root") - - # adding favorites to file browser - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - - # template builder callbacks - nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root") - - # fix ffmpeg settings on script - nuke.addOnScriptLoad(on_script_load) - - # set checker for last versions on loaded containers - nuke.addOnScriptLoad(check_inventory_versions) - nuke.addOnScriptSave(check_inventory_versions) - - # set apply all workfile settings on script load and save - nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - - if nuke_settings["dirmap"]["enabled"]: - log.info("Added Nuke's dir-mapping callback ...") - # Add dirmap for file paths. - nuke.addFilenameFilter(dirmap_file_name_filter) - - log.info("Added Nuke callbacks ...") - - -def reload_config(): - """Attempt to reload pipeline at run-time. - - CAUTION: This is primarily for development and debugging purposes. 
- - """ - - for module in ( - "ayon_nuke.api.actions", - "ayon_nuke.api.menu", - "ayon_nuke.api.plugin", - "ayon_nuke.api.lib", - ): - log.info("Reloading module: {}...".format(module)) - - module = importlib.import_module(module) - - try: - importlib.reload(module) - except AttributeError as e: - from importlib import reload - log.warning("Cannot reload module: {}".format(e)) - reload(module) - - -def _show_workfiles(): - # Make sure parent is not set - # - this makes Workfiles tool as separated window which - # avoid issues with reopening - # - it is possible to explicitly change on top flag of the tool - host_tools.show_workfiles(parent=None, on_top=False) - - -def get_context_label(): - return "{0}, {1}".format( - get_current_folder_path(), - get_current_task_name() - ) - - -def _install_menu(): - """Install AYON menu into Nuke's main menu bar.""" - - # uninstall original AYON menu - main_window = get_main_window() - menubar = nuke.menu("Nuke") - menu = menubar.addMenu(MENU_LABEL) - - if not ASSIST: - label = get_context_label() - context_action_item = menu.addCommand("Context") - context_action_item.setEnabled(False) - - Context.context_action_item = context_action_item - - context_action = context_action_item.action() - context_action.setText(label) - - # add separator after context label - menu.addSeparator() - - menu.addCommand( - "Work Files...", - _show_workfiles - ) - - menu.addSeparator() - if not ASSIST: - # only add parent if nuke version is 14 or higher - # known issue with no solution yet - menu.addCommand( - "Create...", - lambda: host_tools.show_publisher( - parent=main_window, - tab="create" - ) - ) - # only add parent if nuke version is 14 or higher - # known issue with no solution yet - menu.addCommand( - "Publish...", - lambda: host_tools.show_publisher( - parent=main_window, - tab="publish" - ) - ) - - menu.addCommand( - "Load...", - lambda: host_tools.show_loader( - parent=main_window, - use_context=True - ) - ) - menu.addCommand( - "Manage...", - lambda: host_tools.show_scene_inventory(parent=main_window) - ) - menu.addSeparator() - menu.addCommand( - "Library...", - lambda: host_tools.show_library_loader( - parent=main_window - ) - ) - menu.addSeparator() - menu.addCommand( - "Set Resolution", - lambda: WorkfileSettings().reset_resolution() - ) - menu.addCommand( - "Set Frame Range", - lambda: WorkfileSettings().reset_frame_range_handles() - ) - menu.addCommand( - "Set Colorspace", - lambda: WorkfileSettings().set_colorspace() - ) - menu.addCommand( - "Apply All Settings", - lambda: WorkfileSettings().set_context_settings() - ) - - menu.addSeparator() - menu.addCommand( - "Build Workfile", - lambda: BuildWorkfile().process() - ) - - menu_template = menu.addMenu("Template Builder") - menu_template.addCommand( - "Build Workfile from template", - lambda: build_workfile_template() - ) - - if not ASSIST: - menu_template.addSeparator() - menu_template.addCommand( - "Open template", - lambda: open_template_ui( - NukeTemplateBuilder(registered_host()), get_main_window() - ) - ) - menu_template.addCommand( - "Create Place Holder", - lambda: create_placeholder() - ) - menu_template.addCommand( - "Update Place Holder", - lambda: update_placeholder() - ) - - menu.addCommand( - "Push to Project", - lambda: push_to_project.main() - ) - - menu.addSeparator() - menu.addCommand( - "Experimental tools...", - lambda: host_tools.show_experimental_tools_dialog(parent=main_window) - ) - menu.addSeparator() - # add reload pipeline only in debug mode - if bool(os.getenv("NUKE_DEBUG")): - 
menu.addSeparator() - menu.addCommand("Reload Pipeline", reload_config) - - # adding shortcuts - add_shortcuts_from_presets() - - -def change_context_label(): - if ASSIST: - return - - context_action_item = Context.context_action_item - if context_action_item is None: - return - context_action = context_action_item.action() - - old_label = context_action.text() - new_label = get_context_label() - - context_action.setText(new_label) - - log.info("Task label changed from `{}` to `{}`".format( - old_label, new_label)) - - -def add_shortcuts_from_presets(): - menubar = nuke.menu("Nuke") - nuke_presets = get_current_project_settings()["nuke"]["general"] - - if nuke_presets.get("menu"): - menu_label_mapping = { - "create": "Create...", - "manage": "Manage...", - "load": "Load...", - "build_workfile": "Build Workfile", - "publish": "Publish..." - } - - for command_name, shortcut_str in nuke_presets.get("menu").items(): - log.info("menu_name `{}` | menu_label `{}`".format( - command_name, MENU_LABEL - )) - log.info("Adding Shortcut `{}` to `{}`".format( - shortcut_str, command_name - )) - try: - menu = menubar.findItem(MENU_LABEL) - item_label = menu_label_mapping[command_name] - menuitem = menu.findItem(item_label) - menuitem.setShortcut(shortcut_str) - except (AttributeError, KeyError) as e: - log.error(e) - - -def containerise(node, - name, - namespace, - context, - loader=None, - data=None): - """Bundle `node` into an assembly and imprint it with metadata - - Containerisation enables a tracking of version, author and origin - for loaded assets. - - Arguments: - node (nuke.Node): Nuke's node object to imprint as container - name (str): Name of resulting assembly - namespace (str): Namespace under which to host container - context (dict): Asset information - loader (str, optional): Name of node used to produce this container. - - Returns: - node (nuke.Node): containerised nuke's node object - - """ - data = OrderedDict( - [ - ("schema", "openpype:container-2.0"), - ("id", AVALON_CONTAINER_ID), - ("name", name), - ("namespace", namespace), - ("loader", str(loader)), - ("representation", context["representation"]["id"]), - ], - - **data or dict() - ) - - set_avalon_knob_data(node, data) - - # set tab to first native - node.setTab(0) - - return node - - -def parse_container(node): - """Returns containerised data of a node - - Reads the imprinted data from `containerise`. - - Arguments: - node (nuke.Node): Nuke's node object to read imprinted data - - Returns: - dict: The container schema data for this container node. - - """ - data = read_avalon_data(node) - - # If not all required data return the empty container - required = ["schema", "id", "name", - "namespace", "loader", "representation"] - if not all(key in data for key in required): - return - - # Store the node's name - data.update({ - "objectName": node.fullName(), - "node": node, - }) - - return data - - -def update_container(node, keys=None): - """Returns node with updateted containder data - - Arguments: - node (nuke.Node): The node in Nuke to imprint as container, - keys (dict, optional): data which should be updated - - Returns: - node (nuke.Node): nuke node with updated container data - - Raises: - TypeError on given an invalid container node - - """ - keys = keys or dict() - - container = parse_container(node) - if not container: - raise TypeError("Not a valid container node.") - - container.update(keys) - node = set_avalon_knob_data(node, container) - - return node - - -def ls(): - """List available containers. 
- - This function is used by the Container Manager in Nuke. You'll - need to implement a for-loop that then *yields* one Container at - a time. - """ - all_nodes = nuke.allNodes(recurseGroups=False) - - nodes = [n for n in all_nodes] - - for n in nodes: - container = parse_container(n) - if container: - yield container - - -def list_instances(creator_id=None): - """List all created instances to publish from current workfile. - - For SubsetManager - - Args: - creator_id (Optional[str]): creator identifier - - Returns: - (list) of dictionaries matching instances format - """ - instances_by_order = defaultdict(list) - product_instances = [] - instance_ids = set() - - for node in nuke.allNodes(recurseGroups=True): - - if node.Class() in ["Viewer", "Dot"]: - continue - - try: - if node["disable"].value(): - continue - except NameError: - # pass if disable knob doesn't exist - pass - - # get data from avalon knob - instance_data = get_node_data( - node, INSTANCE_DATA_KNOB) - - if not instance_data: - continue - - if instance_data["id"] not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - if creator_id and instance_data["creator_identifier"] != creator_id: - continue - - instance_id = instance_data.get("instance_id") - if not instance_id: - pass - elif instance_id in instance_ids: - instance_data.pop("instance_id") - else: - instance_ids.add(instance_id) - - # node name could change, so update product name data - _update_product_name_data(instance_data, node) - - if "render_order" not in node.knobs(): - product_instances.append((node, instance_data)) - continue - - order = int(node["render_order"].value()) - instances_by_order[order].append((node, instance_data)) - - # Sort instances based on order attribute or product name. - # TODO: remove in future Publisher enhanced with sorting - ordered_instances = [] - for key in sorted(instances_by_order.keys()): - instances_by_product = defaultdict(list) - for node, data_ in instances_by_order[key]: - product_name = data_.get("productName") - if product_name is None: - product_name = data_.get("subset") - instances_by_product[product_name].append((node, data_)) - for subkey in sorted(instances_by_product.keys()): - ordered_instances.extend(instances_by_product[subkey]) - - instances_by_product = defaultdict(list) - for node, data_ in product_instances: - product_name = data_.get("productName") - if product_name is None: - product_name = data_.get("subset") - instances_by_product[product_name].append((node, data_)) - for key in sorted(instances_by_product.keys()): - ordered_instances.extend(instances_by_product[key]) - - return ordered_instances - - -def _update_product_name_data(instance_data, node): - """Update product name data in instance data. - - Args: - instance_data (dict): instance creator data - node (nuke.Node): nuke node - """ - # make sure node name is product name - old_product_name = instance_data.get("productName") - if old_product_name is None: - old_product_name = instance_data.get("subset") - old_variant = instance_data["variant"] - product_name_root = old_product_name.replace(old_variant, "") - - new_product_name = node.name() - new_variant = new_product_name.replace(product_name_root, "") - - instance_data["productName"] = new_product_name - instance_data["variant"] = new_variant - - -def remove_instance(instance): - """Remove instance from current workfile metadata. 
- - For SubsetManager - - Args: - instance (dict): instance representation from subsetmanager model - """ - instance_node = instance.transient_data["node"] - instance_knob = instance_node.knobs()[INSTANCE_DATA_KNOB] - instance_node.removeKnob(instance_knob) - nuke.delete(instance_node) - - -def select_instance(instance): - """ - Select instance in Node View - - Args: - instance (dict): instance representation from subsetmanager model - """ - instance_node = instance.transient_data["node"] - instance_node["selected"].setValue(True) diff --git a/server_addon/nuke/client/ayon_nuke/api/plugin.py b/server_addon/nuke/client/ayon_nuke/api/plugin.py deleted file mode 100644 index fc30f328c7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/plugin.py +++ /dev/null @@ -1,1227 +0,0 @@ -import nuke -import re -import os -import sys -import six -import random -import string -from collections import defaultdict - -from ayon_core.settings import get_current_project_settings -from ayon_core.lib import ( - BoolDef, - EnumDef -) -from ayon_core.lib import StringTemplate -from ayon_core.pipeline import ( - LoaderPlugin, - CreatorError, - Creator as NewCreator, - CreatedInstance, - get_current_task_name, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) -from ayon_core.pipeline.colorspace import ( - get_display_view_colorspace_name, - get_colorspace_settings_from_publish_context, - set_colorspace_data_to_representation -) -from ayon_core.lib.transcoding import ( - VIDEO_EXTENSIONS -) -from .lib import ( - INSTANCE_DATA_KNOB, - Knobby, - maintained_selection, - get_avalon_knob_data, - set_node_knobs_from_settings, - set_node_data, - get_node_data, - get_view_process_node, - get_filenames_without_hash, - link_knobs -) -from .pipeline import ( - list_instances, - remove_instance -) - - -def _collect_and_cache_nodes(creator): - key = "openpype.nuke.nodes" - if key not in creator.collection_shared_data: - instances_by_identifier = defaultdict(list) - for item in list_instances(): - _, instance_data = item - identifier = instance_data["creator_identifier"] - instances_by_identifier[identifier].append(item) - creator.collection_shared_data[key] = instances_by_identifier - return creator.collection_shared_data[key] - - -class NukeCreatorError(CreatorError): - pass - - -class NukeCreator(NewCreator): - selected_nodes = [] - - def pass_pre_attributes_to_instance( - self, - instance_data, - pre_create_data, - keys=None - ): - if not keys: - keys = pre_create_data.keys() - - creator_attrs = instance_data["creator_attributes"] = {} - for pass_key in keys: - creator_attrs[pass_key] = pre_create_data[pass_key] - - def check_existing_product(self, product_name): - """Make sure product name is unique. - - It search within all nodes recursively - and checks if product name is found in - any node having instance data knob. - - Arguments: - product_name (str): Product name - """ - - for node in nuke.allNodes(recurseGroups=True): - # make sure testing node is having instance knob - if INSTANCE_DATA_KNOB not in node.knobs().keys(): - continue - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - - if not node_data: - # a node has no instance data - continue - - # test if product name is matching - if node_data.get("productType") == product_name: - raise NukeCreatorError( - ( - "A publish instance for '{}' already exists " - "in nodes! Please change the variant " - "name to ensure unique output." 
- ).format(product_name) - ) - - def create_instance_node( - self, - node_name, - knobs=None, - parent=None, - node_type=None - ): - """Create node representing instance. - - Arguments: - node_name (str): Name of the new node. - knobs (OrderedDict): node knobs name and values - parent (str): Name of the parent node. - node_type (str, optional): Nuke node Class. - - Returns: - nuke.Node: Newly created instance node. - - """ - node_type = node_type or "NoOp" - - node_knobs = knobs or {} - - # set parent node - parent_node = nuke.root() - if parent: - parent_node = nuke.toNode(parent) - - try: - with parent_node: - created_node = nuke.createNode(node_type) - created_node["name"].setValue(node_name) - - for key, values in node_knobs.items(): - if key in created_node.knobs(): - created_node["key"].setValue(values) - except Exception as _err: - raise NukeCreatorError("Creating have failed: {}".format(_err)) - - return created_node - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - if self.selected_nodes == []: - raise NukeCreatorError("Creator error: No active selection") - else: - self.selected_nodes = [] - - def create(self, product_name, instance_data, pre_create_data): - - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - # make sure product name is unique - self.check_existing_product(product_name) - - try: - instance_node = self.create_instance_node( - product_name, - node_type=instance_data.pop("node_type", None) - ) - instance = CreatedInstance( - self.product_type, - product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - self._add_instance_to_context(instance) - - set_node_data( - instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) - - return instance - - except Exception as er: - six.reraise( - NukeCreatorError, - NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2]) - - def collect_instances(self): - cached_instances = _collect_and_cache_nodes(self) - attr_def_keys = { - attr_def.key - for attr_def in self.get_instance_attr_defs() - } - attr_def_keys.discard(None) - - for (node, data) in cached_instances[self.identifier]: - created_instance = CreatedInstance.from_existing( - data, self - ) - created_instance.transient_data["node"] = node - self._add_instance_to_context(created_instance) - - for key in ( - set(created_instance["creator_attributes"].keys()) - - attr_def_keys - ): - created_instance["creator_attributes"].pop(key) - - def update_instances(self, update_list): - for created_inst, changes in update_list: - instance_node = created_inst.transient_data["node"] - - # update instance node name if product name changed - if "productName" in changes.changed_keys: - instance_node["name"].setValue( - changes["productName"].new_value - ) - - # in case node is not existing anymore (user erased it manually) - try: - instance_node.fullName() - except ValueError: - self.remove_instances([created_inst]) - continue - - set_node_data( - instance_node, - INSTANCE_DATA_KNOB, - created_inst.data_to_store() - ) - - def remove_instances(self, instances): - for instance in instances: - remove_instance(instance) - self._remove_instance_from_context(instance) - - def get_pre_create_attr_defs(self): - return [ - BoolDef( - "use_selection", - default=not self.create_context.headless, - label="Use selection" - ) - ] - - def get_creator_settings(self, project_settings, settings_key=None): - if not settings_key: - 
settings_key = self.__class__.__name__ - return project_settings["nuke"]["create"][settings_key] - - -class NukeWriteCreator(NukeCreator): - """Add Publishable Write node""" - - identifier = "create_write" - label = "Create Write" - product_type = "write" - icon = "sign-out" - - def get_linked_knobs(self): - linked_knobs = [] - if "channels" in self.instance_attributes: - linked_knobs.append("channels") - if "ordered" in self.instance_attributes: - linked_knobs.append("render_order") - if "use_range_limit" in self.instance_attributes: - linked_knobs.extend(["___", "first", "last", "use_limit"]) - - return linked_knobs - - def integrate_links(self, node, outputs=True): - # skip if no selection - if not self.selected_node: - return - - # collect dependencies - input_nodes = [self.selected_node] - dependent_nodes = self.selected_node.dependent() if outputs else [] - - # relinking to collected connections - for i, input in enumerate(input_nodes): - node.setInput(i, input) - - # make it nicer in graph - node.autoplace() - - # relink also dependent nodes - for dep_nodes in dependent_nodes: - dep_nodes.setInput(0, node) - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - selected_nodes = nuke.selectedNodes() - if selected_nodes == []: - raise NukeCreatorError("Creator error: No active selection") - elif len(selected_nodes) > 1: - NukeCreatorError("Creator error: Select only one camera node") - self.selected_node = selected_nodes[0] - else: - self.selected_node = None - - def get_pre_create_attr_defs(self): - attr_defs = [ - BoolDef("use_selection", label="Use selection"), - self._get_render_target_enum() - ] - return attr_defs - - def get_instance_attr_defs(self): - attr_defs = [ - self._get_render_target_enum(), - ] - # add reviewable attribute - if "reviewable" in self.instance_attributes: - attr_defs.append(self._get_reviewable_bool()) - - return attr_defs - - def _get_render_target_enum(self): - rendering_targets = { - "local": "Local machine rendering", - "frames": "Use existing frames" - } - if ("farm_rendering" in self.instance_attributes): - rendering_targets["frames_farm"] = "Use existing frames - farm" - rendering_targets["farm"] = "Farm rendering" - - return EnumDef( - "render_target", - items=rendering_targets, - label="Render target" - ) - - def _get_reviewable_bool(self): - return BoolDef( - "review", - default=True, - label="Review" - ) - - def create(self, product_name, instance_data, pre_create_data): - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - # make sure product name is unique - self.check_existing_product(product_name) - - instance_node = self.create_instance_node( - product_name, - instance_data - ) - - try: - instance = CreatedInstance( - self.product_type, - product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - self._add_instance_to_context(instance) - - set_node_data( - instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) - - return instance - - except Exception as er: - six.reraise( - NukeCreatorError, - NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2] - ) - - def apply_settings(self, project_settings): - """Method called on initialization of plugin to apply settings.""" - - # plugin settings - plugin_settings = self.get_creator_settings(project_settings) - temp_rendering_path_template = ( - plugin_settings.get("temp_rendering_path_template") - or self.temp_rendering_path_template - ) - # TODO remove template key 
replacements - temp_rendering_path_template = ( - temp_rendering_path_template - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", "{family}") - .replace("{task[name]}", "{task}") - .replace("{folder[name]}", "{asset}") - ) - # individual attributes - self.instance_attributes = plugin_settings.get( - "instance_attributes") or self.instance_attributes - self.prenodes = plugin_settings["prenodes"] - self.default_variants = plugin_settings.get( - "default_variants") or self.default_variants - self.temp_rendering_path_template = temp_rendering_path_template - - -def get_instance_group_node_childs(instance): - """Return list of instance group node children - - Args: - instance (pyblish.Instance): pyblish instance - - Returns: - list: [nuke.Node] - """ - node = instance.data["transientData"]["node"] - - if node.Class() != "Group": - return - - # collect child nodes - child_nodes = [] - # iterate all nodes - for node in nuke.allNodes(group=node): - # add contained nodes to instance's node list - child_nodes.append(node) - - return child_nodes - - -def get_colorspace_from_node(node): - # Add version data to instance - colorspace = node["colorspace"].value() - - # remove default part of the string - if "default (" in colorspace: - colorspace = re.sub(r"default.\(|\)", "", colorspace) - - return colorspace - - -def get_review_presets_config(): - settings = get_current_project_settings() - review_profiles = ( - settings["core"] - ["publish"] - ["ExtractReview"] - ["profiles"] - ) - - outputs = {} - for profile in review_profiles: - outputs.update(profile.get("outputs", {})) - - return [str(name) for name, _prop in outputs.items()] - - -class NukeLoader(LoaderPlugin): - container_id_knob = "containerId" - container_id = None - - def reset_container_id(self): - self.container_id = "".join(random.choice( - string.ascii_uppercase + string.digits) for _ in range(10)) - - def get_container_id(self, node): - id_knob = node.knobs().get(self.container_id_knob) - return id_knob.value() if id_knob else None - - def get_members(self, source): - """Return nodes that has same "containerId" as `source`""" - source_id = self.get_container_id(source) - return [node for node in nuke.allNodes(recurseGroups=True) - if self.get_container_id(node) == source_id - and node is not source] if source_id else [] - - def set_as_member(self, node): - source_id = self.get_container_id(node) - - if source_id: - node[self.container_id_knob].setValue(source_id) - else: - HIDEN_FLAG = 0x00040000 - _knob = Knobby( - "String_Knob", - self.container_id, - flags=[ - nuke.READ_ONLY, - HIDEN_FLAG - ]) - knob = _knob.create(self.container_id_knob) - node.addKnob(knob) - - def clear_members(self, parent_node): - parent_class = parent_node.Class() - members = self.get_members(parent_node) - - dependent_nodes = None - for node in members: - _depndc = [n for n in node.dependent() if n not in members] - if not _depndc: - continue - - dependent_nodes = _depndc - break - - for member in members: - if member.Class() == parent_class: - continue - self.log.info("removing node: `{}".format(member.name())) - nuke.delete(member) - - return dependent_nodes - - -class ExporterReview(object): - """ - Base class object for generating review data from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - data = None - publish_on_farm = False - - def __init__(self, - klass, - instance, - multiple_presets=True - ): - - self.log = klass.log - self.instance = 
instance - self.multiple_presets = multiple_presets - self.path_in = self.instance.data.get("path", None) - self.staging_dir = self.instance.data["stagingDir"] - self.collection = self.instance.data.get("collection", None) - self.data = {"representations": []} - - def get_file_info(self): - if self.collection: - # get path - self.fname = os.path.basename( - self.collection.format("{head}{padding}{tail}") - ) - self.fhead = self.collection.format("{head}") - - # get first and last frame - self.first_frame = min(self.collection.indexes) - self.last_frame = max(self.collection.indexes) - - # make sure slate frame is not included - frame_start_handle = self.instance.data["frameStartHandle"] - if frame_start_handle > self.first_frame: - self.first_frame = frame_start_handle - - else: - self.fname = os.path.basename(self.path_in) - self.fhead = os.path.splitext(self.fname)[0] + "." - self.first_frame = self.instance.data["frameStartHandle"] - self.last_frame = self.instance.data["frameEndHandle"] - - if "#" in self.fhead: - self.fhead = self.fhead.replace("#", "")[:-1] - - def get_representation_data( - self, - tags=None, - range=False, - custom_tags=None, - colorspace=None, - ): - """ Add representation data to self.data - - Args: - tags (list[str], optional): list of defined tags. - Defaults to None. - range (bool, optional): flag for adding ranges. - Defaults to False. - custom_tags (list[str], optional): user inputted custom tags. - Defaults to None. - colorspace (str, optional): colorspace name. - Defaults to None. - """ - add_tags = tags or [] - repre = { - "name": self.name, - "ext": self.ext, - "files": self.file, - "stagingDir": self.staging_dir, - "tags": [self.name.replace("_", "-")] + add_tags, - "data": { - # making sure that once intermediate file is published - # as representation, we will be able to then identify it - # from representation.data.isIntermediate - "isIntermediate": True - }, - } - - if custom_tags: - repre["custom_tags"] = custom_tags - - if range: - repre.update({ - "frameStart": self.first_frame, - "frameEnd": self.last_frame, - }) - if ".{}".format(self.ext) not in VIDEO_EXTENSIONS: - filenames = get_filenames_without_hash( - self.file, self.first_frame, self.last_frame) - repre["files"] = filenames - - if self.multiple_presets: - repre["outputName"] = self.name - - if self.publish_on_farm: - repre["tags"].append("publish_on_farm") - - # add colorspace data to representation - if colorspace: - set_colorspace_data_to_representation( - repre, - self.instance.context.data, - colorspace=colorspace, - log=self.log - ) - self.data["representations"].append(repre) - - def get_imageio_baking_profile(self): - from . 
import lib as opnlib - nuke_imageio = opnlib.get_nuke_imageio_settings() - - if nuke_imageio["baking_target"]["enabled"]: - return nuke_imageio["baking_target"] - else: - # viewer is having display and view keys only and it is - # display_view type - return { - "type": "display_view", - "display_view": nuke_imageio["viewer"], - } - - -class ExporterReviewLut(ExporterReview): - """ - Generator object for review lut from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - - """ - _temp_nodes = [] - - def __init__(self, - klass, - instance, - name=None, - ext=None, - cube_size=None, - lut_size=None, - lut_style=None, - multiple_presets=True): - # initialize parent class - super(ExporterReviewLut, self).__init__( - klass, instance, multiple_presets) - - # deal with now lut defined in viewer lut - if hasattr(klass, "viewer_lut_raw"): - self.viewer_lut_raw = klass.viewer_lut_raw - else: - self.viewer_lut_raw = False - - self.name = name or "baked_lut" - self.ext = ext or "cube" - self.cube_size = cube_size or 32 - self.lut_size = lut_size or 1024 - self.lut_style = lut_style or "linear" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def clean_nodes(self): - for node in self._temp_nodes: - nuke.delete(node) - self._temp_nodes = [] - self.log.info("Deleted nodes...") - - def generate_lut(self, **kwargs): - bake_viewer_process = kwargs["bake_viewer_process"] - bake_viewer_input_process_node = kwargs[ - "bake_viewer_input_process"] - - # ---------- start nodes creation - - # CMSTestPattern - cms_node = nuke.createNode("CMSTestPattern") - cms_node["cube_size"].setValue(self.cube_size) - # connect - self._temp_nodes.append(cms_node) - self.previous_node = cms_node - - if bake_viewer_process: - # Node View Process - if bake_viewer_input_process_node: - ipn = get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug( - "ViewProcess... `{}`".format(self._temp_nodes)) - - if not self.viewer_lut_raw: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug( - "OCIODisplay... 
`{}`".format(self._temp_nodes)) - - # GenerateLUT - gen_lut_node = nuke.createNode("GenerateLUT") - gen_lut_node["file"].setValue(self.path) - gen_lut_node["file_type"].setValue(".{}".format(self.ext)) - gen_lut_node["lut1d"].setValue(self.lut_size) - gen_lut_node["style1d"].setValue(self.lut_style) - # connect - gen_lut_node.setInput(0, self.previous_node) - self._temp_nodes.append(gen_lut_node) - # ---------- end nodes creation - - # Export lut file - nuke.execute( - gen_lut_node.name(), - int(self.first_frame), - int(self.first_frame)) - - self.log.info("Exported...") - - # ---------- generate representation data - self.get_representation_data() - - # ---------- Clean up - self.clean_nodes() - - return self.data - - -class ExporterReviewMov(ExporterReview): - """ - Metaclass for generating review mov files - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - _temp_nodes = {} - - def __init__(self, - klass, - instance, - name=None, - ext=None, - multiple_presets=True - ): - # initialize parent class - super(ExporterReviewMov, self).__init__( - klass, instance, multiple_presets) - # passing presets for nodes to self - self.nodes = klass.nodes if hasattr(klass, "nodes") else {} - - # deal with now lut defined in viewer lut - self.viewer_lut_raw = klass.viewer_lut_raw - self.write_colorspace = instance.data["colorspace"] - self.color_channels = instance.data["color_channels"] - self.formatting_data = instance.data["anatomyData"] - - self.name = name or "baked" - self.ext = ext or "mov" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - if ".{}".format(self.ext) in VIDEO_EXTENSIONS: - self.file = "{}{}.{}".format( - self.fhead, self.name, self.ext) - else: - # Output is image (or image sequence) - # When the file is an image it's possible it - # has extra information after the `fhead` that - # we want to preserve, e.g. like frame numbers - # or frames hashes like `####` - filename_no_ext = os.path.splitext( - os.path.basename(self.path_in))[0] - after_head = filename_no_ext[len(self.fhead):] - self.file = "{}{}.{}.{}".format( - self.fhead, self.name, after_head, self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def clean_nodes(self, node_name): - for node in self._temp_nodes[node_name]: - nuke.delete(node) - self._temp_nodes[node_name] = [] - self.log.info("Deleted nodes...") - - def render(self, render_node_name): - self.log.info("Rendering... ") - # Render Write node - nuke.execute( - render_node_name, - int(self.first_frame), - int(self.last_frame)) - - self.log.info("Rendered...") - - def save_file(self): - import shutil - with maintained_selection(): - self.log.info("Saving nodes as file... 
") - # create nk path - path = f"{os.path.splitext(self.path)[0]}.nk" - # save file to the path - if not os.path.exists(os.path.dirname(path)): - os.makedirs(os.path.dirname(path)) - shutil.copyfile(self.instance.context.data["currentFile"], path) - - self.log.info("Nodes exported...") - return path - - def generate_mov(self, farm=False, delete=True, **kwargs): - # colorspace data - colorspace = self.write_colorspace - - # get colorspace settings - # get colorspace data from context - config_data, _ = get_colorspace_settings_from_publish_context( - self.instance.context.data) - - add_tags = [] - self.publish_on_farm = farm - read_raw = kwargs["read_raw"] - bake_viewer_process = kwargs["bake_viewer_process"] - bake_viewer_input_process_node = kwargs[ - "bake_viewer_input_process"] - - baking_colorspace = self.get_imageio_baking_profile() - - colorspace_override = kwargs["colorspace_override"] - if colorspace_override["enabled"]: - baking_colorspace = colorspace_override - - fps = self.instance.context.data["fps"] - - self.log.debug(f">> baking_view_profile `{baking_colorspace}`") - - add_custom_tags = kwargs.get("add_custom_tags", []) - - self.log.info(f"__ add_custom_tags: `{add_custom_tags}`") - - product_name = self.instance.data["productName"] - self._temp_nodes[product_name] = [] - - # Read node - r_node = nuke.createNode("Read") - r_node["file"].setValue(self.path_in) - r_node["first"].setValue(self.first_frame) - r_node["origfirst"].setValue(self.first_frame) - r_node["last"].setValue(self.last_frame) - r_node["origlast"].setValue(self.last_frame) - r_node["colorspace"].setValue(self.write_colorspace) - - # do not rely on defaults, set explicitly - # to be sure it is set correctly - r_node["frame_mode"].setValue("expression") - r_node["frame"].setValue("") - - if read_raw: - r_node["raw"].setValue(1) - - # connect to Read node - self._shift_to_previous_node_and_temp( - product_name, r_node, "Read... `{}`" - ) - - # add reformat node - reformat_nodes_config = kwargs["reformat_nodes_config"] - if reformat_nodes_config["enabled"]: - reposition_nodes = reformat_nodes_config["reposition_nodes"] - for reposition_node in reposition_nodes: - node_class = reposition_node["node_class"] - knobs = reposition_node["knobs"] - node = nuke.createNode(node_class) - set_node_knobs_from_settings(node, knobs) - - # connect in order - self._connect_to_above_nodes( - node, product_name, "Reposition node... `{}`" - ) - # append reformatted tag - add_tags.append("reformatted") - - # only create colorspace baking if toggled on - if bake_viewer_process: - if bake_viewer_input_process_node: - # View Process node - ipn = get_view_process_node() - if ipn is not None: - # connect to ViewProcess node - self._connect_to_above_nodes( - ipn, product_name, "ViewProcess... `{}`" - ) - - if not self.viewer_lut_raw: - # OCIODisplay - if baking_colorspace["type"] == "display_view": - display_view = baking_colorspace["display_view"] - - message = "OCIODisplay... 
'{}'" - node = nuke.createNode("OCIODisplay") - - # assign display and view - display = display_view["display"] - view = display_view["view"] - - # display could not be set in nuke_default config - if display: - # format display string with anatomy data - display = StringTemplate(display).format_strict( - self.formatting_data - ) - node["display"].setValue(display) - - # format view string with anatomy data - view = StringTemplate(view).format_strict( - self.formatting_data) - # assign viewer - node["view"].setValue(view) - - if config_data: - # convert display and view to colorspace - colorspace = get_display_view_colorspace_name( - config_path=config_data["path"], - display=display, view=view - ) - - # OCIOColorSpace - elif baking_colorspace["type"] == "colorspace": - baking_colorspace = baking_colorspace["colorspace"] - # format colorspace string with anatomy data - baking_colorspace = StringTemplate( - baking_colorspace).format_strict(self.formatting_data) - node = nuke.createNode("OCIOColorSpace") - message = "OCIOColorSpace... '{}'" - # no need to set input colorspace since it is driven by - # working colorspace - node["out_colorspace"].setValue(baking_colorspace) - colorspace = baking_colorspace - - else: - raise ValueError( - "Invalid baking color space type: " - f"{baking_colorspace['type']}" - ) - - self._connect_to_above_nodes( - node, product_name, message - ) - - # Write node - write_node = nuke.createNode("Write") - self.log.debug(f"Path: {self.path}") - - write_node["file"].setValue(str(self.path)) - write_node["file_type"].setValue(str(self.ext)) - write_node["channels"].setValue(str(self.color_channels)) - - # Knobs `meta_codec` and `mov64_codec` are not available on centos. - # TODO shouldn't this come from settings on outputs? - try: - write_node["meta_codec"].setValue("ap4h") - except Exception: - self.log.info("`meta_codec` knob was not found") - - try: - write_node["mov64_codec"].setValue("ap4h") - write_node["mov64_fps"].setValue(float(fps)) - except Exception: - self.log.info("`mov64_codec` knob was not found") - - try: - write_node["mov64_write_timecode"].setValue(1) - except Exception: - self.log.info("`mov64_write_timecode` knob was not found") - - write_node["raw"].setValue(1) - - # connect - write_node.setInput(0, self.previous_node) - self._temp_nodes[product_name].append(write_node) - self.log.debug(f"Write... `{self._temp_nodes[product_name]}`") - # ---------- end nodes creation - - # ---------- render or save to nk - if self.publish_on_farm: - nuke.scriptSave() - path_nk = self.save_file() - self.data.update({ - "bakeScriptPath": path_nk, - "bakeWriteNodeName": write_node.name(), - "bakeRenderPath": self.path - }) - else: - self.render(write_node.name()) - - # ---------- generate representation data - tags = ["review", "need_thumbnail"] - - if delete: - tags.append("delete") - - self.get_representation_data( - tags=tags + add_tags, - custom_tags=add_custom_tags, - range=True, - colorspace=colorspace, - ) - - self.log.debug(f"Representation... 
`{self.data}`") - - self.clean_nodes(product_name) - nuke.scriptSave() - - return self.data - - def _shift_to_previous_node_and_temp(self, product_name, node, message): - self._temp_nodes[product_name].append(node) - self.previous_node = node - self.log.debug(message.format(self._temp_nodes[product_name])) - - def _connect_to_above_nodes(self, node, product_name, message): - node.setInput(0, self.previous_node) - self._shift_to_previous_node_and_temp(product_name, node, message) - - -def convert_to_valid_instaces(): - """ Check and convert to latest publisher instances - - Also save as new minor version of workfile. - """ - def product_type_to_identifier(product_type): - mapping = { - "render": "create_write_render", - "prerender": "create_write_prerender", - "still": "create_write_image", - "model": "create_model", - "camera": "create_camera", - "nukenodes": "create_backdrop", - "gizmo": "create_gizmo", - "source": "create_source" - - } - return mapping[product_type] - - from ayon_nuke.api import workio - - task_name = get_current_task_name() - - # save into new workfile - current_file = workio.current_file() - - # add file suffex if not - if "_publisherConvert" not in current_file: - new_workfile = ( - current_file[:-3] - + "_publisherConvert" - + current_file[-3:] - ) - else: - new_workfile = current_file - - path = new_workfile.replace("\\", "/") - nuke.scriptSaveAs(new_workfile, overwrite=1) - nuke.Root()["name"].setValue(path) - nuke.Root()["project_directory"].setValue(os.path.dirname(path)) - nuke.Root().setModified(False) - - _remove_old_knobs(nuke.Root()) - - # loop all nodes and convert - for node in nuke.allNodes(recurseGroups=True): - transfer_data = { - "creator_attributes": {} - } - creator_attr = transfer_data["creator_attributes"] - - if node.Class() in ["Viewer", "Dot"]: - continue - - if get_node_data(node, INSTANCE_DATA_KNOB): - continue - - # get data from avalon knob - avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"]) - - if not avalon_knob_data: - continue - - if avalon_knob_data["id"] not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - transfer_data.update({ - k: v for k, v in avalon_knob_data.items() - if k not in ["families", "creator"] - }) - - transfer_data["task"] = task_name - - product_type = avalon_knob_data.get("productType") - if product_type is None: - product_type = avalon_knob_data["family"] - - # establish families - families_ak = avalon_knob_data.get("families", []) - - if "suspend_publish" in node.knobs(): - creator_attr["suspended_publish"] = ( - node["suspend_publish"].value()) - - # get review knob value - if "review" in node.knobs(): - creator_attr["review"] = ( - node["review"].value()) - - if "publish" in node.knobs(): - transfer_data["active"] = ( - node["publish"].value()) - - # add identifier - transfer_data["creator_identifier"] = product_type_to_identifier( - product_type - ) - - # Add all nodes in group instances. 
- if node.Class() == "Group": - # only alter families for render product type - if families_ak and "write" in families_ak.lower(): - target = node["render"].value() - if target == "Use existing frames": - creator_attr["render_target"] = "frames" - elif target == "Local": - # Local rendering - creator_attr["render_target"] = "local" - elif target == "On farm": - # Farm rendering - creator_attr["render_target"] = "farm" - - if "deadlinePriority" in node.knobs(): - transfer_data["farm_priority"] = ( - node["deadlinePriority"].value()) - if "deadlineChunkSize" in node.knobs(): - creator_attr["farm_chunk"] = ( - node["deadlineChunkSize"].value()) - if "deadlineConcurrentTasks" in node.knobs(): - creator_attr["farm_concurrency"] = ( - node["deadlineConcurrentTasks"].value()) - - _remove_old_knobs(node) - - # add new instance knob with transfer data - set_node_data( - node, INSTANCE_DATA_KNOB, transfer_data) - - nuke.scriptSave() - - -def _remove_old_knobs(node): - remove_knobs = [ - "review", "publish", "render", "suspend_publish", "warn", "divd", - "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", - "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" - ] - - # remove all old knobs - for knob in node.allKnobs(): - try: - if knob.name() in remove_knobs: - node.removeKnob(knob) - elif "avalon" in knob.name(): - node.removeKnob(knob) - except ValueError: - pass - - -def exposed_write_knobs(settings, plugin_name, instance_node): - exposed_knobs = settings["nuke"]["create"][plugin_name].get( - "exposed_knobs", [] - ) - if exposed_knobs: - instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs')) - write_node = nuke.allNodes(group=instance_node, filter="Write")[0] - link_knobs(exposed_knobs, write_node, instance_node) diff --git a/server_addon/nuke/client/ayon_nuke/api/push_to_project.py b/server_addon/nuke/client/ayon_nuke/api/push_to_project.py deleted file mode 100644 index 852e5d0e31..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/push_to_project.py +++ /dev/null @@ -1,118 +0,0 @@ -from collections import defaultdict -import shutil -import os - -from ayon_api import get_project, get_folder_by_id, get_task_by_id -from ayon_core.settings import get_project_settings -from ayon_core.pipeline import Anatomy, registered_host -from ayon_core.pipeline.template_data import get_template_data -from ayon_core.pipeline.workfile import get_workdir_with_workdir_data -from ayon_core.tools import context_dialog - -from .utils import bake_gizmos_recursively -from .lib import MENU_LABEL - -import nuke - - -def bake_container(container): - """Bake containers to read nodes.""" - - node = container["node"] - - # Fetch knobs to remove in order. - knobs_to_remove = [] - remove = False - for count in range(0, node.numKnobs()): - knob = node.knob(count) - - # All knobs from "AYON" tab knob onwards. - if knob.name() == MENU_LABEL: - remove = True - - if remove: - knobs_to_remove.append(knob) - - # Dont remove knobs from "containerId" onwards. - if knob.name() == "containerId": - remove = False - - # Knobs needs to be remove in reverse order, because child knobs needs to - # be remove first. - for knob in reversed(knobs_to_remove): - node.removeKnob(knob) - - node["tile_color"].setValue(0) - - -def main(): - context = context_dialog.ask_for_context() - - if context is None: - return - - # Get workfile path to save to. 
- project_name = context["project_name"] - project = get_project(project_name) - folder = get_folder_by_id(project_name, context["folder_id"]) - task = get_task_by_id(project_name, context["task_id"]) - host = registered_host() - project_settings = get_project_settings(project_name) - anatomy = Anatomy(project_name) - - workdir_data = get_template_data( - project, folder, task, host.name, project_settings - ) - - workdir = get_workdir_with_workdir_data( - workdir_data, - project_name, - anatomy, - project_settings=project_settings - ) - # Save current workfile. - current_file = host.current_file() - host.save_file(current_file) - - for container in host.ls(): - bake_container(container) - - # Bake gizmos. - bake_gizmos_recursively() - - # Copy all read node files to "resources" folder next to workfile and - # change file path. - first_frame = int(nuke.root()["first_frame"].value()) - last_frame = int(nuke.root()["last_frame"].value()) - files_by_node_name = defaultdict(set) - nodes_by_name = {} - for count in range(first_frame, last_frame + 1): - nuke.frame(count) - for node in nuke.allNodes(filter="Read"): - files_by_node_name[node.name()].add( - nuke.filename(node, nuke.REPLACE) - ) - nodes_by_name[node.name()] = node - - resources_dir = os.path.join(workdir, "resources") - for name, files in files_by_node_name.items(): - dir = os.path.join(resources_dir, name) - if not os.path.exists(dir): - os.makedirs(dir) - - for f in files: - shutil.copy(f, os.path.join(dir, os.path.basename(f))) - - node = nodes_by_name[name] - path = node["file"].value().replace(os.path.dirname(f), dir) - node["file"].setValue(path.replace("\\", "/")) - - # Save current workfile to new context. - pushed_workfile = os.path.join( - workdir, os.path.basename(current_file)) - host.save_file(pushed_workfile) - - # Open current context workfile. 
- host.open_file(current_file) - - nuke.message(f"Pushed to project: \n{pushed_workfile}") diff --git a/server_addon/nuke/client/ayon_nuke/api/utils.py b/server_addon/nuke/client/ayon_nuke/api/utils.py deleted file mode 100644 index 646bb0ece1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/utils.py +++ /dev/null @@ -1,224 +0,0 @@ -import os -import re - -import nuke - -import pyblish.util -import pyblish.api -from qtpy import QtWidgets - -from ayon_core import resources -from ayon_core.pipeline import registered_host -from ayon_core.tools.utils import show_message_dialog -from ayon_core.pipeline.create import CreateContext - - -def set_context_favorites(favorites=None): - """ Adding favorite folders to nuke's browser - - Arguments: - favorites (dict): couples of {name:path} - """ - favorites = favorites or {} - icon_path = resources.get_resource("icons", "folder-favorite.png") - for name, path in favorites.items(): - nuke.addFavoriteDir( - name, - path, - nuke.IMAGE | nuke.SCRIPT | nuke.GEO, - icon=icon_path) - - -def get_node_outputs(node): - ''' - Return a dictionary of the nodes and pipes that are connected to node - ''' - dep_dict = {} - dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS) - for d in dependencies: - dep_dict[d] = [] - for i in range(d.inputs()): - if d.input(i) == node: - dep_dict[d].append(i) - return dep_dict - - -def is_node_gizmo(node): - ''' - return True if node is gizmo - ''' - return 'gizmo_file' in node.knobs() - - -def gizmo_is_nuke_default(gizmo): - '''Check if gizmo is in default install path''' - plug_dir = os.path.join(os.path.dirname( - nuke.env['ExecutablePath']), 'plugins') - return gizmo.filename().startswith(plug_dir) - - -def bake_gizmos_recursively(in_group=None): - """Converting a gizmo to group - - Arguments: - is_group (nuke.Node)[optonal]: group node or all nodes - """ - from .lib import maintained_selection - if in_group is None: - in_group = nuke.Root() - # preserve selection after all is done - with maintained_selection(): - # jump to the group - with in_group: - for node in nuke.allNodes(): - if is_node_gizmo(node) and not gizmo_is_nuke_default(node): - with node: - outputs = get_node_outputs(node) - group = node.makeGroup() - # Reconnect inputs and outputs if any - if outputs: - for n, pipes in outputs.items(): - for i in pipes: - n.setInput(i, group) - for i in range(node.inputs()): - group.setInput(i, node.input(i)) - # set node position and name - group.setXYpos(node.xpos(), node.ypos()) - name = node.name() - nuke.delete(node) - group.setName(name) - node = group - - if node.Class() == "Group": - bake_gizmos_recursively(node) - - -def colorspace_exists_on_node(node, colorspace_name): - """ Check if colorspace exists on node - - Look through all options in the colorspace knob, and see if we have an - exact match to one of the items. - - Args: - node (nuke.Node): nuke node object - colorspace_name (str): color profile name - - Returns: - bool: True if exists - """ - try: - colorspace_knob = node['colorspace'] - except ValueError: - # knob is not available on input node - return False - - return colorspace_name in get_colorspace_list(colorspace_knob) - - -def get_colorspace_list(colorspace_knob): - """Get available colorspace profile names - - Args: - colorspace_knob (nuke.Knob): nuke knob object - - Returns: - list: list of strings names of profiles - """ - results = [] - - # This pattern is to match with roles which uses an indentation and - # parentheses with original colorspace. 
The value returned from the - # colorspace is the string before the indentation, so we'll need to - # convert the values to match with value returned from the knob, - # ei. knob.value(). - pattern = r".*\t.* \(.*\)" - for colorspace in nuke.getColorspaceList(colorspace_knob): - match = re.search(pattern, colorspace) - if match: - results.append(colorspace.split("\t", 1)[0]) - else: - results.append(colorspace) - - return results - - -def is_headless(): - """ - Returns: - bool: headless - """ - return QtWidgets.QApplication.instance() is None - - -def submit_render_on_farm(node): - # Ensure code is executed in root context. - if nuke.root() == nuke.thisNode(): - _submit_render_on_farm(node) - else: - # If not in root context, move to the root context and then execute the - # code. - with nuke.root(): - _submit_render_on_farm(node) - - -def _submit_render_on_farm(node): - """Render on farm submission - - This function prepares the context for farm submission, validates it, - extracts relevant data, copies the current workfile to a timestamped copy, - and submits the job to the farm. - - Args: - node (Node): The node for which the farm submission is being made. - """ - - host = registered_host() - create_context = CreateContext(host) - - # Ensure CreateInstance is enabled. - for instance in create_context.instances: - if node.name() != instance.transient_data["node"].name(): - continue - - instance.data["active"] = True - - context = pyblish.api.Context() - context.data["create_context"] = create_context - # Used in pyblish plugin to determine which instance to publish. - context.data["node_name"] = node.name() - # Used in pyblish plugins to determine whether to run or not. - context.data["render_on_farm"] = True - - # Since we need to bypass version validation and incrementing, we need to - # remove the plugins from the list that are responsible for these tasks. - plugins = pyblish.api.discover() - blacklist = ["IncrementScriptVersion", "ValidateVersion"] - plugins = [ - plugin - for plugin in plugins - if plugin.__name__ not in blacklist - ] - - context = pyblish.util.publish(context, plugins=plugins) - - error_message = "" - success = True - for result in context.data["results"]: - if result["success"]: - continue - - success = False - - err = result["error"] - error_message += "\n" - error_message += err.formatted_traceback - - if not success: - show_message_dialog( - "Publish Errors", error_message, level="critical" - ) - return - - show_message_dialog( - "Submission Successful", "Submission to the farm was successful." - ) diff --git a/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py b/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py deleted file mode 100644 index aebf91c4a4..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py +++ /dev/null @@ -1,156 +0,0 @@ -import collections -import nuke - -from ayon_core.pipeline import registered_host -from ayon_core.pipeline.workfile.workfile_template_builder import ( - AbstractTemplateBuilder, - PlaceholderPlugin, -) -from ayon_core.tools.workfile_template_build import ( - WorkfileBuildPlaceholderDialog, -) -from .lib import ( - imprint, - reset_selection, - get_main_window, - WorkfileSettings, -) - -PLACEHOLDER_SET = "PLACEHOLDERS_SET" - - -class NukeTemplateBuilder(AbstractTemplateBuilder): - """Concrete implementation of AbstractTemplateBuilder for nuke""" - - def import_template(self, path): - """Import template into current scene. - Block if a template is already loaded. 
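To make the role handling in get_colorspace_list() above concrete, here is a standalone sketch of the same regex and tab split that runs outside Nuke; the knob entries are made up for illustration and are not taken from a real OCIO config:

    import re

    # Mirrors the pattern and the split("\t", 1)[0] used in get_colorspace_list().
    pattern = r".*\t.* \(.*\)"
    knob_items = [
        "matte_paint\tUtility - sRGB - Texture (matte_paint)",  # role-style entry
        "ACES - ACEScg",                                         # plain colorspace name
    ]
    results = []
    for item in knob_items:
        if re.search(pattern, item):
            # keep only the role name in front of the tab
            results.append(item.split("\t", 1)[0])
        else:
            results.append(item)
    print(results)  # ['matte_paint', 'ACES - ACEScg']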
- - Args: - path (str): A path to current template (usually given by - get_template_preset implementation) - - Returns: - bool: Whether the template was successfully imported or not - """ - - # TODO check if the template is already imported - - nuke.nodePaste(path) - reset_selection() - - return True - - -class NukePlaceholderPlugin(PlaceholderPlugin): - node_color = 4278190335 - - def _collect_scene_placeholders(self): - # Cache placeholder data to shared data - placeholder_nodes = self.builder.get_shared_populate_data( - "placeholder_nodes" - ) - if placeholder_nodes is None: - placeholder_nodes = {} - all_groups = collections.deque() - all_groups.append(nuke.thisGroup()) - while all_groups: - group = all_groups.popleft() - for node in group.nodes(): - if isinstance(node, nuke.Group): - all_groups.append(node) - - node_knobs = node.knobs() - if ( - "is_placeholder" not in node_knobs - or not node.knob("is_placeholder").value() - ): - continue - - if "empty" in node_knobs and node.knob("empty").value(): - continue - - placeholder_nodes[node.fullName()] = node - - self.builder.set_shared_populate_data( - "placeholder_nodes", placeholder_nodes - ) - return placeholder_nodes - - def create_placeholder(self, placeholder_data): - placeholder_data["plugin_identifier"] = self.identifier - - placeholder = nuke.nodes.NoOp() - placeholder.setName("PLACEHOLDER") - placeholder.knob("tile_color").setValue(self.node_color) - - imprint(placeholder, placeholder_data) - imprint(placeholder, {"is_placeholder": True}) - placeholder.knob("is_placeholder").setVisible(False) - - def update_placeholder(self, placeholder_item, placeholder_data): - node = nuke.toNode(placeholder_item.scene_identifier) - imprint(node, placeholder_data) - - def _parse_placeholder_node_data(self, node): - placeholder_data = {} - for key in self.get_placeholder_keys(): - knob = node.knob(key) - value = None - if knob is not None: - value = knob.getValue() - placeholder_data[key] = value - return placeholder_data - - def delete_placeholder(self, placeholder): - """Remove placeholder if building was successful""" - placeholder_node = nuke.toNode(placeholder.scene_identifier) - nuke.delete(placeholder_node) - - -def build_workfile_template(*args, **kwargs): - builder = NukeTemplateBuilder(registered_host()) - builder.build_template(*args, **kwargs) - - # set all settings to shot context default - WorkfileSettings().set_context_settings() - - -def update_workfile_template(*args): - builder = NukeTemplateBuilder(registered_host()) - builder.rebuild_template() - - -def create_placeholder(*args): - host = registered_host() - builder = NukeTemplateBuilder(host) - window = WorkfileBuildPlaceholderDialog(host, builder, - parent=get_main_window()) - window.show() - - -def update_placeholder(*args): - host = registered_host() - builder = NukeTemplateBuilder(host) - placeholder_items_by_id = { - placeholder_item.scene_identifier: placeholder_item - for placeholder_item in builder.get_placeholders() - } - placeholder_items = [] - for node in nuke.selectedNodes(): - node_name = node.fullName() - if node_name in placeholder_items_by_id: - placeholder_items.append(placeholder_items_by_id[node_name]) - - # TODO show UI at least - if len(placeholder_items) == 0: - raise ValueError("No node selected") - - if len(placeholder_items) > 1: - raise ValueError("Too many selected nodes") - - placeholder_item = placeholder_items[0] - window = WorkfileBuildPlaceholderDialog(host, builder, - parent=get_main_window()) - window.set_update_mode(placeholder_item) - 
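A small reading note on the node colours used throughout these plugins: the integer 4278190335 on NukePlaceholderPlugin above and the "0x...ff" strings that the creators below pass through int(value, 16) both appear to be the 0xRRGGBBAA packing that Nuke's tile_color knob stores as a single integer (my reading of the code, not something the code states). A quick check in plain Python:

    # 4278190335 decodes to solid red with full alpha under 0xRRGGBBAA packing.
    print(hex(4278190335))                  # 0xff0000ff
    backdrop_color = int("0x7533c1ff", 16)  # hex-string form used elsewhere here
    print((backdrop_color >> 24) & 0xFF)    # 117, i.e. 0x75, the red channel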
window.exec_() diff --git a/server_addon/nuke/client/ayon_nuke/api/workio.py b/server_addon/nuke/client/ayon_nuke/api/workio.py deleted file mode 100644 index b2445fd3d2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/workio.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Host API required Work Files tool""" -import os -import nuke -import shutil -from .utils import is_headless - - -def file_extensions(): - return [".nk"] - - -def has_unsaved_changes(): - return nuke.root().modified() - - -def save_file(filepath): - path = filepath.replace("\\", "/") - nuke.scriptSaveAs(path, overwrite=1) - nuke.Root()["name"].setValue(path) - nuke.Root()["project_directory"].setValue(os.path.dirname(path)) - nuke.Root().setModified(False) - - -def open_file(filepath): - - def read_script(nuke_script): - nuke.scriptClear() - nuke.scriptReadFile(nuke_script) - nuke.Root()["name"].setValue(nuke_script) - nuke.Root()["project_directory"].setValue(os.path.dirname(nuke_script)) - nuke.Root().setModified(False) - - filepath = filepath.replace("\\", "/") - - # To remain in the same window, we have to clear the script and read - # in the contents of the workfile. - # Nuke Preferences can be read after the script is read. - read_script(filepath) - - if not is_headless(): - autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate() - autosave_prmpt = "Autosave detected.\n" \ - "Would you like to load the autosave file?" # noqa - if os.path.isfile(autosave) and nuke.ask(autosave_prmpt): - try: - # Overwrite the filepath with autosave - shutil.copy(autosave, filepath) - # Now read the (auto-saved) script again - read_script(filepath) - except shutil.Error as err: - nuke.message( - "Detected autosave file could not be used.\n{}" - - .format(err)) - - return True - - -def current_file(): - current_file = nuke.root().name() - - # Unsaved current file - if current_file == 'Root': - return None - - return os.path.normpath(current_file).replace("\\", "/") - - -def work_root(session): - - work_dir = session["AYON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - path = os.path.join(work_dir, scene_dir) - else: - path = work_dir - - return os.path.normpath(path).replace("\\", "/") diff --git a/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py b/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py deleted file mode 100644 index afef3ba843..0000000000 --- a/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py +++ /dev/null @@ -1,12 +0,0 @@ -from ayon_applications import PreLaunchHook - - -class PrelaunchNukeAssistHook(PreLaunchHook): - """ - Adding flag when nukeassist - """ - app_groups = {"nukeassist"} - launch_types = set() - - def execute(self): - self.launch_context.env["NUKEASSIST"] = "1" diff --git a/server_addon/nuke/client/ayon_nuke/plugins/__init__.py b/server_addon/nuke/client/ayon_nuke/plugins/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/__init__.py b/server_addon/nuke/client/ayon_nuke/plugins/create/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py b/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py deleted file mode 100644 index 65e719d15b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py +++ /dev/null @@ -1,55 +0,0 @@ -from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID -from 
ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin -from ayon_nuke.api.lib import ( - INSTANCE_DATA_KNOB, - get_node_data, - get_avalon_knob_data, - NODE_TAB_NAME, -) -from ayon_nuke.api.plugin import convert_to_valid_instaces - -import nuke - - -class LegacyConverted(ProductConvertorPlugin): - identifier = "legacy.converter" - - def find_instances(self): - - legacy_found = False - # search for first available legacy item - for node in nuke.allNodes(recurseGroups=True): - if node.Class() in ["Viewer", "Dot"]: - continue - - if get_node_data(node, INSTANCE_DATA_KNOB): - continue - - if NODE_TAB_NAME not in node.knobs(): - continue - - # get data from avalon knob - avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"], create=False) - - if not avalon_knob_data: - continue - - if avalon_knob_data["id"] not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - # catch and break - legacy_found = True - break - - if legacy_found: - # if not item do not add legacy instance converter - self.add_convertor_item("Convert legacy instances") - - def convert(self): - # loop all instances and convert them - convert_to_valid_instaces() - # remove legacy item if all is fine - self.remove_convertor_item() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py deleted file mode 100644 index f97b9efeb6..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py +++ /dev/null @@ -1,53 +0,0 @@ -from nukescripts import autoBackdrop - -from ayon_nuke.api import ( - NukeCreator, - maintained_selection, - select_nodes -) - - -class CreateBackdrop(NukeCreator): - """Add Publishable Backdrop""" - - settings_category = "nuke" - - identifier = "create_backdrop" - label = "Nukenodes (backdrop)" - product_type = "nukenodes" - icon = "file-archive-o" - maintain_selection = True - - # plugin attributes - node_color = "0xdfea5dff" - - def create_instance_node( - self, - node_name, - knobs=None, - parent=None, - node_type=None - ): - with maintained_selection(): - if len(self.selected_nodes) >= 1: - select_nodes(self.selected_nodes) - - created_node = autoBackdrop() - created_node["name"].setValue(node_name) - created_node["tile_color"].setValue(int(self.node_color, 16)) - created_node["note_font_size"].setValue(24) - created_node["label"].setValue("[{}]".format(node_name)) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # make sure product name is unique - self.check_existing_product(product_name) - - instance = super(CreateBackdrop, self).create( - product_name, - instance_data, - pre_create_data - ) - - return instance diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py deleted file mode 100644 index 69e5b9c676..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py +++ /dev/null @@ -1,71 +0,0 @@ -import nuke -from ayon_nuke.api import ( - NukeCreator, - NukeCreatorError, - maintained_selection -) -from ayon_nuke.api.lib import ( - create_camera_node_by_version -) - - -class CreateCamera(NukeCreator): - """Add Publishable Camera""" - - settings_category = "nuke" - - identifier = "create_camera" - label = "Camera (3d)" - product_type = "camera" - icon = "camera" - - # plugin attributes - node_color = "0xff9100ff" - - def create_instance_node( - self, - node_name, - knobs=None, - 
parent=None, - node_type=None - ): - with maintained_selection(): - if self.selected_nodes: - node = self.selected_nodes[0] - if node.Class() != "Camera3": - raise NukeCreatorError( - "Creator error: Select only camera node type") - created_node = self.selected_nodes[0] - else: - created_node = create_camera_node_by_version() - - created_node["tile_color"].setValue( - int(self.node_color, 16)) - - created_node["name"].setValue(node_name) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # make sure product name is unique - self.check_existing_product(product_name) - - instance = super(CreateCamera, self).create( - product_name, - instance_data, - pre_create_data - ) - - return instance - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - if self.selected_nodes == []: - raise NukeCreatorError( - "Creator error: No active selection") - elif len(self.selected_nodes) > 1: - raise NukeCreatorError( - "Creator error: Select only one camera node") - else: - self.selected_nodes = [] diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py deleted file mode 100644 index 6be7cd58db..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py +++ /dev/null @@ -1,67 +0,0 @@ -import nuke -from ayon_nuke.api import ( - NukeCreator, - NukeCreatorError, - maintained_selection -) - - -class CreateGizmo(NukeCreator): - """Add Publishable Group as gizmo""" - - settings_category = "nuke" - - identifier = "create_gizmo" - label = "Gizmo (group)" - product_type = "gizmo" - icon = "file-archive-o" - default_variants = ["ViewerInput", "Lut", "Effect"] - - # plugin attributes - node_color = "0x7533c1ff" - - def create_instance_node( - self, - node_name, - knobs=None, - parent=None, - node_type=None - ): - with maintained_selection(): - if self.selected_nodes: - node = self.selected_nodes[0] - if node.Class() != "Group": - raise NukeCreatorError( - "Creator error: Select only 'Group' node type") - created_node = node - else: - created_node = nuke.collapseToGroup() - - created_node["tile_color"].setValue( - int(self.node_color, 16)) - - created_node["name"].setValue(node_name) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # make sure product name is unique - self.check_existing_product(product_name) - - instance = super(CreateGizmo, self).create( - product_name, - instance_data, - pre_create_data - ) - - return instance - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - if self.selected_nodes == []: - raise NukeCreatorError("Creator error: No active selection") - elif len(self.selected_nodes) > 1: - NukeCreatorError("Creator error: Select only one 'Group' node") - else: - self.selected_nodes = [] diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py deleted file mode 100644 index b7d7b740c2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py +++ /dev/null @@ -1,67 +0,0 @@ -import nuke -from ayon_nuke.api import ( - NukeCreator, - NukeCreatorError, - maintained_selection -) - - -class CreateModel(NukeCreator): - """Add Publishable Camera""" - - settings_category = "nuke" - - identifier = "create_model" - label = "Model 
(3d)" - product_type = "model" - icon = "cube" - default_variants = ["Main"] - - # plugin attributes - node_color = "0xff3200ff" - - def create_instance_node( - self, - node_name, - knobs=None, - parent=None, - node_type=None - ): - with maintained_selection(): - if self.selected_nodes: - node = self.selected_nodes[0] - if node.Class() != "Scene": - raise NukeCreatorError( - "Creator error: Select only 'Scene' node type") - created_node = node - else: - created_node = nuke.createNode("Scene") - - created_node["tile_color"].setValue( - int(self.node_color, 16)) - - created_node["name"].setValue(node_name) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # make sure product name is unique - self.check_existing_product(product_name) - - instance = super(CreateModel, self).create( - product_name, - instance_data, - pre_create_data - ) - - return instance - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - if self.selected_nodes == []: - raise NukeCreatorError("Creator error: No active selection") - elif len(self.selected_nodes) > 1: - NukeCreatorError("Creator error: Select only one 'Scene' node") - else: - self.selected_nodes = [] diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py deleted file mode 100644 index 1579cebb1d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py +++ /dev/null @@ -1,90 +0,0 @@ -import nuke -import six -import sys -from ayon_nuke.api import ( - INSTANCE_DATA_KNOB, - NukeCreator, - NukeCreatorError, - set_node_data -) -from ayon_core.pipeline import ( - CreatedInstance -) - - -class CreateSource(NukeCreator): - """Add Publishable Read with source""" - - settings_category = "nuke" - - identifier = "create_source" - label = "Source (read)" - product_type = "source" - icon = "film" - default_variants = ["Effect", "Backplate", "Fire", "Smoke"] - - # plugin attributes - node_color = "0xff9100ff" - - def create_instance_node( - self, - node_name, - read_node - ): - read_node["tile_color"].setValue( - int(self.node_color, 16)) - read_node["name"].setValue(node_name) - - return read_node - - def create(self, product_name, instance_data, pre_create_data): - - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - try: - for read_node in self.selected_nodes: - if read_node.Class() != 'Read': - continue - - node_name = read_node.name() - _product_name = product_name + node_name - - # make sure product name is unique - self.check_existing_product(_product_name) - - instance_node = self.create_instance_node( - _product_name, - read_node - ) - instance = CreatedInstance( - self.product_type, - _product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - self._add_instance_to_context(instance) - - set_node_data( - instance_node, - INSTANCE_DATA_KNOB, - instance.data_to_store() - ) - - except Exception as er: - six.reraise( - NukeCreatorError, - NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2]) - - def set_selected_nodes(self, pre_create_data): - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - if self.selected_nodes == []: - raise NukeCreatorError("Creator error: No active selection") - else: - NukeCreatorError( - "Creator error: only supported with active selection") diff --git 
a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py deleted file mode 100644 index 2268817e76..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py +++ /dev/null @@ -1,174 +0,0 @@ -import nuke -import sys -import six - -from ayon_core.pipeline import ( - CreatedInstance -) -from ayon_core.lib import ( - BoolDef, - NumberDef, - UISeparatorDef, - EnumDef -) -from ayon_nuke import api as napi -from ayon_nuke.api.plugin import exposed_write_knobs - - -class CreateWriteImage(napi.NukeWriteCreator): - - settings_category = "nuke" - - identifier = "create_write_image" - label = "Image (write)" - product_type = "image" - icon = "sign-out" - - instance_attributes = [ - "use_range_limit" - ] - default_variants = [ - "StillFrame", - "MPFrame", - "LayoutFrame" - ] - temp_rendering_path_template = ( - "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - - def get_pre_create_attr_defs(self): - attr_defs = [ - BoolDef( - "use_selection", - default=not self.create_context.headless, - label="Use selection" - ), - self._get_render_target_enum(), - UISeparatorDef(), - self._get_frame_source_number() - ] - return attr_defs - - def _get_render_target_enum(self): - rendering_targets = { - "local": "Local machine rendering", - "frames": "Use existing frames" - } - - return EnumDef( - "render_target", - items=rendering_targets, - label="Render target" - ) - - def _get_frame_source_number(self): - return NumberDef( - "active_frame", - label="Active frame", - default=nuke.frame() - ) - - def create_instance_node(self, product_name, instance_data): - settings = self.project_settings["nuke"]["create"]["CreateWriteImage"] - - # add fpath_template - write_data = { - "creator": self.__class__.__name__, - "productName": product_name, - "fpath_template": self.temp_rendering_path_template, - "render_on_farm": ( - "render_on_farm" in settings["instance_attributes"] - ) - } - write_data.update(instance_data) - - created_node = napi.create_write_node( - product_name, - write_data, - input=self.selected_node, - prenodes=self.prenodes, - linked_knobs=self.get_linked_knobs(), - **{ - "frame": nuke.frame() - } - ) - - self._add_frame_range_limit(created_node, instance_data) - - self.integrate_links(created_node, outputs=True) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - product_name = product_name.format(**pre_create_data) - - # pass values from precreate to instance - self.pass_pre_attributes_to_instance( - instance_data, - pre_create_data, - [ - "active_frame", - "render_target" - ] - ) - - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - # make sure product name is unique - self.check_existing_product(product_name) - - instance_node = self.create_instance_node( - product_name, - instance_data, - ) - - try: - instance = CreatedInstance( - self.product_type, - product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - self._add_instance_to_context(instance) - - napi.set_node_data( - instance_node, - napi.INSTANCE_DATA_KNOB, - instance.data_to_store() - ) - - exposed_write_knobs( - self.project_settings, self.__class__.__name__, instance_node - ) - - return instance - - except Exception as er: - six.reraise( - napi.NukeCreatorError, - napi.NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2] - ) - - def _add_frame_range_limit(self, write_node, instance_data): 
- if "use_range_limit" not in self.instance_attributes: - return - - active_frame = ( - instance_data["creator_attributes"].get("active_frame")) - - write_node.begin() - for n in nuke.allNodes(): - # get write node - if n.Class() in "Write": - w_node = n - write_node.end() - - w_node["use_limit"].setValue(True) - w_node["first"].setValue(active_frame or nuke.frame()) - w_node["last"].setExpression("first") - - return write_node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py deleted file mode 100644 index 014e91e81c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py +++ /dev/null @@ -1,160 +0,0 @@ -import nuke -import sys -import six - -from ayon_core.pipeline import ( - CreatedInstance -) -from ayon_core.lib import ( - BoolDef -) -from ayon_nuke import api as napi -from ayon_nuke.api.plugin import exposed_write_knobs - - -class CreateWritePrerender(napi.NukeWriteCreator): - - settings_category = "nuke" - - identifier = "create_write_prerender" - label = "Prerender (write)" - product_type = "prerender" - icon = "sign-out" - - instance_attributes = [ - "use_range_limit" - ] - default_variants = [ - "Key01", - "Bg01", - "Fg01", - "Branch01", - "Part01" - ] - temp_rendering_path_template = ( - "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - - # Before write node render. - order = 90 - - def get_pre_create_attr_defs(self): - attr_defs = [ - BoolDef( - "use_selection", - default=not self.create_context.headless, - label="Use selection" - ), - self._get_render_target_enum() - ] - return attr_defs - - def create_instance_node(self, product_name, instance_data): - settings = self.project_settings["nuke"]["create"] - settings = settings["CreateWritePrerender"] - - # add fpath_template - write_data = { - "creator": self.__class__.__name__, - "productName": product_name, - "fpath_template": self.temp_rendering_path_template, - "render_on_farm": ( - "render_on_farm" in settings["instance_attributes"] - ) - } - - write_data.update(instance_data) - - # get width and height - if self.selected_node: - width, height = ( - self.selected_node.width(), self.selected_node.height()) - else: - actual_format = nuke.root().knob('format').value() - width, height = (actual_format.width(), actual_format.height()) - - created_node = napi.create_write_node( - product_name, - write_data, - input=self.selected_node, - prenodes=self.prenodes, - linked_knobs=self.get_linked_knobs(), - **{ - "width": width, - "height": height - } - ) - - self._add_frame_range_limit(created_node) - - self.integrate_links(created_node, outputs=True) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # pass values from precreate to instance - self.pass_pre_attributes_to_instance( - instance_data, - pre_create_data, - [ - "render_target" - ] - ) - - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - # make sure product name is unique - self.check_existing_product(product_name) - - instance_node = self.create_instance_node( - product_name, - instance_data - ) - - try: - instance = CreatedInstance( - self.product_type, - product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - self._add_instance_to_context(instance) - - napi.set_node_data( - instance_node, - napi.INSTANCE_DATA_KNOB, - instance.data_to_store() - ) - - exposed_write_knobs( - self.project_settings, 
self.__class__.__name__, instance_node - ) - - return instance - - except Exception as er: - six.reraise( - napi.NukeCreatorError, - napi.NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2] - ) - - def _add_frame_range_limit(self, write_node): - if "use_range_limit" not in self.instance_attributes: - return - - write_node.begin() - for n in nuke.allNodes(): - # get write node - if n.Class() in "Write": - w_node = n - write_node.end() - - w_node["use_limit"].setValue(True) - w_node["first"].setValue(nuke.root()["first_frame"].value()) - w_node["last"].setValue(nuke.root()["last_frame"].value()) - - return write_node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py deleted file mode 100644 index bed081c882..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py +++ /dev/null @@ -1,136 +0,0 @@ -import nuke -import sys -import six - -from ayon_core.pipeline import ( - CreatedInstance -) -from ayon_core.lib import ( - BoolDef -) -from ayon_nuke import api as napi -from ayon_nuke.api.plugin import exposed_write_knobs - - -class CreateWriteRender(napi.NukeWriteCreator): - - settings_category = "nuke" - - identifier = "create_write_render" - label = "Render (write)" - product_type = "render" - icon = "sign-out" - - instance_attributes = [ - "reviewable" - ] - default_variants = [ - "Main", - "Mask" - ] - temp_rendering_path_template = ( - "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - - def get_pre_create_attr_defs(self): - attr_defs = [ - BoolDef( - "use_selection", - default=not self.create_context.headless, - label="Use selection" - ), - self._get_render_target_enum() - ] - return attr_defs - - def create_instance_node(self, product_name, instance_data): - settings = self.project_settings["nuke"]["create"]["CreateWriteRender"] - - # add fpath_template - write_data = { - "creator": self.__class__.__name__, - "productName": product_name, - "fpath_template": self.temp_rendering_path_template, - "render_on_farm": ( - "render_on_farm" in settings["instance_attributes"] - ) - } - - write_data.update(instance_data) - - # get width and height - if self.selected_node: - width, height = ( - self.selected_node.width(), self.selected_node.height()) - else: - actual_format = nuke.root().knob('format').value() - width, height = (actual_format.width(), actual_format.height()) - - self.log.debug(">>>>>>> : {}".format(self.instance_attributes)) - self.log.debug(">>>>>>> : {}".format(self.get_linked_knobs())) - - created_node = napi.create_write_node( - product_name, - write_data, - input=self.selected_node, - prenodes=self.prenodes, - linked_knobs=self.get_linked_knobs(), - **{ - "width": width, - "height": height - } - ) - - self.integrate_links(created_node, outputs=False) - - return created_node - - def create(self, product_name, instance_data, pre_create_data): - # pass values from precreate to instance - self.pass_pre_attributes_to_instance( - instance_data, - pre_create_data, - [ - "render_target" - ] - ) - # make sure selected nodes are added - self.set_selected_nodes(pre_create_data) - - # make sure product name is unique - self.check_existing_product(product_name) - - instance_node = self.create_instance_node( - product_name, - instance_data - ) - - try: - instance = CreatedInstance( - self.product_type, - product_name, - instance_data, - self - ) - - instance.transient_data["node"] = instance_node - - 
self._add_instance_to_context(instance) - - napi.set_node_data( - instance_node, - napi.INSTANCE_DATA_KNOB, - instance.data_to_store() - ) - - exposed_write_knobs( - self.project_settings, self.__class__.__name__, instance_node - ) - - return instance - - except Exception as er: - six.reraise( - napi.NukeCreatorError, - napi.NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2] - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py b/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py deleted file mode 100644 index 463d898224..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py +++ /dev/null @@ -1,84 +0,0 @@ -import ayon_api - -import ayon_nuke.api as api -from ayon_core.pipeline import ( - AutoCreator, - CreatedInstance, -) -from ayon_nuke.api import ( - INSTANCE_DATA_KNOB, - set_node_data -) -import nuke - - -class WorkfileCreator(AutoCreator): - - settings_category = "nuke" - - identifier = "workfile" - product_type = "workfile" - - default_variant = "Main" - - def get_instance_attr_defs(self): - return [] - - def collect_instances(self): - root_node = nuke.root() - instance_data = api.get_node_data( - root_node, api.INSTANCE_DATA_KNOB - ) - - project_name = self.create_context.get_current_project_name() - folder_path = self.create_context.get_current_folder_path() - task_name = self.create_context.get_current_task_name() - host_name = self.create_context.host_name - - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - product_name = self.get_product_name( - project_name, - folder_entity, - task_entity, - self.default_variant, - host_name, - ) - instance_data.update({ - "folderPath": folder_path, - "task": task_name, - "variant": self.default_variant - }) - instance_data.update(self.get_dynamic_data( - project_name, - folder_entity, - task_entity, - self.default_variant, - host_name, - instance_data - )) - - instance = CreatedInstance( - self.product_type, product_name, instance_data, self - ) - instance.transient_data["node"] = root_node - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - for created_inst, _changes in update_list: - instance_node = created_inst.transient_data["node"] - - set_node_data( - instance_node, - INSTANCE_DATA_KNOB, - created_inst.data_to_store() - ) - - def create(self, options=None): - # no need to create if it is created - # in `collect_instances` - pass diff --git a/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py b/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py deleted file mode 100644 index 11d65d4b8c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py +++ /dev/null @@ -1,36 +0,0 @@ -from ayon_core.lib import Logger -from ayon_core.pipeline import InventoryAction -from ayon_nuke.api.lib import set_avalon_knob_data - - -class RepairOldLoaders(InventoryAction): - - label = "Repair Old Loaders" - icon = "gears" - color = "#cc0000" - - log = Logger.get_logger(__name__) - - def process(self, containers): - import nuke - new_loader = "LoadClip" - - for cdata in containers: - orig_loader = cdata["loader"] - orig_name = cdata["objectName"] - if orig_loader not in ["LoadSequence", "LoadMov"]: - self.log.warning( - "This repair action is only working on " - "`LoadSequence` and `LoadMov` Loaders") - continue - - 
new_name = orig_name.replace(orig_loader, new_loader) - node = nuke.toNode(cdata["objectName"]) - - cdata.update({ - "loader": new_loader, - "objectName": new_name - }) - node["name"].setValue(new_name) - # get data from avalon knob - set_avalon_knob_data(node, cdata) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py b/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py deleted file mode 100644 index f67c8c16e9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py +++ /dev/null @@ -1,21 +0,0 @@ -from ayon_core.pipeline import InventoryAction -from ayon_nuke.api.command import viewer_update_and_undo_stop - - -class SelectContainers(InventoryAction): - - label = "Select Containers" - icon = "mouse-pointer" - color = "#d8d8d8" - - def process(self, containers): - import nuke - - nodes = [nuke.toNode(i["objectName"]) for i in containers] - - with viewer_update_and_undo_stop(): - # clear previous_selection - [n['selected'].setValue(False) for n in nodes] - # Select tool - for node in nodes: - node["selected"].setValue(True) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py b/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py deleted file mode 100644 index a4e2b156a3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py +++ /dev/null @@ -1,77 +0,0 @@ -"""A module containing generic loader actions that will display in the Loader. - -""" - -from ayon_core.lib import Logger -from ayon_core.pipeline import load -from ayon_nuke.api import lib - -log = Logger.get_logger(__name__) - - -class SetFrameRangeLoader(load.LoaderPlugin): - """Set frame range excluding pre- and post-handles""" - - product_types = { - "animation", - "camera", - "write", - "yeticache", - "pointcache", - } - representations = {"*"} - extensions = {"*"} - - label = "Set frame range" - order = 11 - icon = "clock-o" - color = "white" - - def load(self, context, name, namespace, data): - version_entity = context["version"] - version_attributes = version_entity["attrib"] - - start = version_attributes.get("frameStart") - end = version_attributes.get("frameEnd") - - log.info("start: {}, end: {}".format(start, end)) - if start is None or end is None: - log.info("Skipping setting frame range because start or " - "end frame data is missing..") - return - - lib.update_frame_range(start, end) - - -class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): - """Set frame range including pre- and post-handles""" - - product_types = { - "animation", - "camera", - "write", - "yeticache", - "pointcache", - } - representations = {"*"} - - label = "Set frame range (with handles)" - order = 12 - icon = "clock-o" - color = "white" - - def load(self, context, name, namespace, data): - version_attributes = context["version"]["attrib"] - start = version_attributes.get("frameStart") - end = version_attributes.get("frameEnd") - - if start is None or end is None: - print("Skipping setting frame range because start or " - "end frame data is missing..") - return - - # Include handles - start -= version_attributes.get("handleStart", 0) - end += version_attributes.get("handleEnd", 0) - - lib.update_frame_range(start, end) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py deleted file mode 100644 index 054a56d041..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py +++ /dev/null @@ -1,255 
+0,0 @@ -import nuke -import nukescripts -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - maintained_selection, - reset_selection, - select_nodes, - get_avalon_knob_data, - set_avalon_knob_data -) -from ayon_nuke.api.command import viewer_update_and_undo_stop -from ayon_nuke.api import containerise, update_container - - -class LoadBackdropNodes(load.LoaderPlugin): - """Loading Published Backdrop nodes (workfile, nukenodes)""" - - product_types = {"workfile", "nukenodes"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Import Nuke Nodes" - order = 0 - icon = "eye" - color = "white" - node_color = "0x7533c1ff" - - def load(self, context, name, namespace, data): - """ - Loading function to import .nk file into script and wrap - it on backdrop - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - - # get main variables - namespace = namespace or context["folder"]["name"] - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - colorspace = version_attributes.get("colorSpace") - - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # Get mouse position - n = nuke.createNode("NoOp") - xcursor, ycursor = (n.xpos(), n.ypos()) - reset_selection() - nuke.delete(n) - - bdn_frame = 50 - - with maintained_selection(): - - # add group from nk - nuke.nodePaste(file) - - # get all pasted nodes - new_nodes = list() - nodes = nuke.selectedNodes() - - # get pointer position in DAG - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes, direction="right", offset=200 + bdn_frame - ) - - # reset position to all nodes and replace inputs and output - for n in nodes: - reset_selection() - xpos = (n.xpos() - xcursor) + xpointer - ypos = (n.ypos() - ycursor) + ypointer - n.setXYpos(xpos, ypos) - - # replace Input nodes for dots - if n.Class() in "Input": - dot = nuke.createNode("Dot") - new_name = n.name().replace("INP", "DOT") - dot.setName(new_name) - dot["label"].setValue(new_name) - dot.setXYpos(xpos, ypos) - new_nodes.append(dot) - - # rewire - dep = n.dependent() - for d in dep: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if n is dpcy), 0) - d.setInput(index, dot) - - # remove Input node - reset_selection() - nuke.delete(n) - continue - - # replace Input nodes for dots - elif n.Class() in "Output": - dot = nuke.createNode("Dot") - new_name = n.name() + "_DOT" - dot.setName(new_name) - dot["label"].setValue(new_name) - dot.setXYpos(xpos, ypos) - new_nodes.append(dot) - - # rewire - dep = next((d for d in n.dependencies()), None) - if dep: - dot.setInput(0, dep) - - # remove Input node - reset_selection() - nuke.delete(n) - continue - else: - new_nodes.append(n) - - # reselect nodes with new Dot instead of Inputs and Output - reset_selection() - select_nodes(new_nodes) - # 
place on backdrop - bdn = nukescripts.autoBackdrop() - - # add frame offset - xpos = bdn.xpos() - bdn_frame - ypos = bdn.ypos() - bdn_frame - bdwidth = bdn["bdwidth"].value() + (bdn_frame*2) - bdheight = bdn["bdheight"].value() + (bdn_frame*2) - - bdn["xpos"].setValue(xpos) - bdn["ypos"].setValue(ypos) - bdn["bdwidth"].setValue(bdwidth) - bdn["bdheight"].setValue(bdheight) - - bdn["name"].setValue(object_name) - bdn["label"].setValue("Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!".format(object_name)) - bdn["note_font_size"].setValue(20) - - return containerise( - node=bdn, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - name = container["name"] - namespace = container["namespace"] - object_name = "{}_{}".format(name, namespace) - - version_attributes = version_entity["attrib"] - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - xpos = GN.xpos() - ypos = GN.ypos() - avalon_data = get_avalon_knob_data(GN) - nuke.delete(GN) - # add group from nk - nuke.nodePaste(file) - - GN = nuke.selectedNode() - set_avalon_knob_data(GN, avalon_data) - GN.setXYpos(xpos, ypos) - GN["name"].setValue(object_name) - - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(GN, data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py deleted file mode 100644 index 3930cf52fa..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py +++ /dev/null @@ -1,198 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) -from ayon_nuke.api.lib import ( - maintained_selection -) - - -class AlembicCameraLoader(load.LoaderPlugin): - """ - This will load alembic camera into script. 
- """ - - product_types = {"camera"} - representations = {"*"} - extensions = {"abc"} - - settings_category = "nuke" - - label = "Load Alembic Camera" - icon = "camera" - color = "orange" - node_color = "0x3469ffff" - - def load(self, context, name, namespace, data): - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - # add additional metadata from the version to imprint to metadata knob - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - } - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - with maintained_selection(): - camera_node = nuke.createNode( - "Camera2", - "name {} file {} read_from_file True".format( - object_name, file), - inpanel=False - ) - - camera_node.forceValidate() - camera_node["frame_rate"].setValue(float(fps)) - - # workaround because nuke's bug is not adding - # animation keys properly - xpos = camera_node.xpos() - ypos = camera_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(camera_node) - nuke.nodePaste("%clipboard%") - camera_node = nuke.toNode(object_name) - camera_node.setXYpos(xpos, ypos) - - # color node by correct color by actual version - self.node_version_color( - context["project"]["name"], version_entity, camera_node - ) - - return containerise( - node=camera_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """ - Called by Scene Inventory when look should be updated to current - version. - If any reference edits cannot be applied, eg. shader renamed and - material not present, reference is unloaded and cleaned. - All failed edits are highlighted to the user via message box. 
- - Args: - container: object that has look to be updated - representation: (dict): relationship data to get proper - representation from DB and persisted - data in .json - Returns: - None - """ - # Get version from io - version_entity = context["version"] - repre_entity = context["representation"] - - # get main variables - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - # prepare data for imprinting - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = get_representation_path(repre_entity).replace("\\", "/") - - with maintained_selection(): - camera_node = container["node"] - camera_node['selected'].setValue(True) - - # collect input output dependencies - dependencies = camera_node.dependencies() - dependent = camera_node.dependent() - - camera_node["frame_rate"].setValue(float(fps)) - camera_node["file"].setValue(file) - - # workaround because nuke's bug is - # not adding animation keys properly - xpos = camera_node.xpos() - ypos = camera_node.ypos() - nuke.nodeCopy("%clipboard%") - camera_name = camera_node.name() - nuke.delete(camera_node) - nuke.nodePaste("%clipboard%") - camera_node = nuke.toNode(camera_name) - camera_node.setXYpos(xpos, ypos) - - # link to original input nodes - for i, input in enumerate(dependencies): - camera_node.setInput(i, input) - # link to original output nodes - for d in dependent: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if camera_node is dpcy), 0) - d.setInput(index, camera_node) - - # color node by correct color by actual version - self.node_version_color( - context["project"]["name"], version_entity, camera_node - ) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(camera_node, data_imprint) - - def node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version - """ - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - node["tile_color"].setValue(int(color_value, 16)) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py deleted file mode 100644 index d1e38eea6b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py +++ /dev/null @@ -1,584 +0,0 @@ -from copy import deepcopy - -import nuke -import qargparse -import ayon_api - -from ayon_core.lib import Logger -from ayon_core.pipeline import ( - get_representation_path, -) -from ayon_core.pipeline.colorspace import ( - get_imageio_file_rules_colorspace_from_filepath, - get_current_context_imageio_config_preset, -) -from ayon_nuke.api.lib import ( - get_imageio_input_colorspace, - 
maintained_selection -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop, - colorspace_exists_on_node -) -from ayon_core.lib.transcoding import ( - VIDEO_EXTENSIONS, - IMAGE_EXTENSIONS -) -from ayon_nuke.api import plugin - - -class LoadClip(plugin.NukeLoader): - """Load clip into Nuke - - Either it is image sequence or video file. - """ - log = Logger.get_logger(__name__) - - product_types = { - "source", - "plate", - "render", - "prerender", - "review", - } - representations = {"*"} - extensions = set( - ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) - ) - - settings_category = "nuke" - - label = "Load Clip" - order = -20 - icon = "file-video-o" - color = "white" - - # Loaded from settings - representations_include = [] - - script_start = int(nuke.root()["first_frame"].value()) - - # option gui - options_defaults = { - "start_at_workfile": True, - "add_retime": True, - "deep_exr": False - } - - node_name_template = "{class_name}_{ext}" - - @classmethod - def get_options(cls, *args): - return [ - qargparse.Boolean( - "start_at_workfile", - help="Load at workfile start frame", - default=cls.options_defaults["start_at_workfile"] - ), - qargparse.Boolean( - "add_retime", - help="Load with retime", - default=cls.options_defaults["add_retime"] - ), - qargparse.Boolean( - "deep_exr", - help="Read with deep exr", - default=cls.options_defaults["deep_exr"] - ) - ] - - @classmethod - def get_representations(cls): - return cls.representations_include or cls.representations - - def load(self, context, name, namespace, options): - """Load asset via database.""" - project_name = context["project"]["name"] - repre_entity = context["representation"] - version_entity = context["version"] - version_attributes = version_entity["attrib"] - version_data = version_entity["data"] - - # reset container id so it is always unique for each instance - self.reset_container_id() - - is_sequence = len(repre_entity["files"]) > 1 - - if is_sequence: - context["representation"] = ( - self._representation_with_hash_in_frame(repre_entity) - ) - - filepath = self.filepath_from_context(context) - filepath = filepath.replace("\\", "/") - self.log.debug("_ filepath: {}".format(filepath)) - - start_at_workfile = options.get( - "start_at_workfile", self.options_defaults["start_at_workfile"]) - - add_retime = options.get( - "add_retime", self.options_defaults["add_retime"]) - - deep_exr = options.get( - "deep_exr", self.options_defaults["deep_exr"]) - - repre_id = repre_entity["id"] - - self.log.debug( - "Representation id `{}` ".format(repre_id)) - - self.handle_start = version_attributes.get("handleStart", 0) - self.handle_end = version_attributes.get("handleEnd", 0) - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - first -= self.handle_start - last += self.handle_end - - if not is_sequence: - duration = last - first - first = 1 - last = first + duration - - # If a slate is present, the frame range is 1 frame longer for movies, - # but file sequences its the first frame that is 1 frame lower. - slate_frames = repre_entity["data"].get("slateFrames", 0) - extension = "." 
+ repre_entity["context"]["ext"] - - if extension in VIDEO_EXTENSIONS: - last += slate_frames - - files_count = len(repre_entity["files"]) - if extension in IMAGE_EXTENSIONS and files_count != 1: - first -= slate_frames - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - if not filepath: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - read_name = self._get_node_name(context) - read_node = None - if deep_exr: - # Create the Loader with the filename path set - read_node = nuke.createNode( - "DeepRead", - "name {}".format(read_name), - inpanel=False - ) - else: - # Create the Loader with the filename path set - read_node = nuke.createNode( - "Read", - "name {}".format(read_name), - inpanel=False - ) - - # get colorspace - colorspace = ( - repre_entity["data"].get("colorspace") - or version_attributes.get("colorSpace") - ) - - # to avoid multiple undo steps for rest of process - # we will switch off undo-ing - with viewer_update_and_undo_stop(): - read_node["file"].setValue(filepath) - if read_node.Class() == "Read": - self.set_colorspace_to_node( - read_node, - filepath, - project_name, - version_entity, - repre_entity - ) - - self._set_range_to_node( - read_node, first, last, start_at_workfile, slate_frames - ) - - version_name = version_entity["version"] - if version_name < 0: - version_name = "hero" - - data_imprint = { - "version": version_name, - "db_colorspace": colorspace - } - - # add attributes from the version to imprint metadata knob - for key in [ - "frameStart", - "frameEnd", - "source", - "fps", - "handleStart", - "handleEnd", - ]: - value = version_attributes.get(key, str(None)) - if isinstance(value, str): - value = value.replace("\\", "/") - data_imprint[key] = value - - if add_retime and version_data.get("retime"): - data_imprint["addRetime"] = True - - read_node["tile_color"].setValue(int("0x4ecd25ff", 16)) - - container = containerise( - read_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - if add_retime and version_data.get("retime"): - self._make_retimes(read_node, version_data) - - self.set_as_member(read_node) - - return container - - def switch(self, container, context): - self.update(container, context) - - def _representation_with_hash_in_frame(self, repre_entity): - """Convert frame key value to padded hash - - Args: - repre_entity (dict): Representation entity. - - Returns: - dict: altered representation data - - """ - new_repre_entity = deepcopy(repre_entity) - context = new_repre_entity["context"] - - # Get the frame from the context and hash it - frame = context["frame"] - hashed_frame = "#" * len(str(frame)) - - # Replace the frame with the hash in the originalBasename - if ( - "{originalBasename}" in new_repre_entity["attrib"]["template"] - ): - origin_basename = context["originalBasename"] - context["originalBasename"] = origin_basename.replace( - frame, hashed_frame - ) - - # Replace the frame with the hash in the frame - new_repre_entity["context"]["frame"] = hashed_frame - return new_repre_entity - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - version_attributes = version_entity["attrib"] - version_data = version_entity["data"] - - is_sequence = len(repre_entity["files"]) > 1 - - read_node = container["node"] - - if is_sequence: - repre_entity = self._representation_with_hash_in_frame( - repre_entity - ) - - filepath = ( - get_representation_path(repre_entity) - ).replace("\\", "/") - self.log.debug("_ filepath: {}".format(filepath)) - - start_at_workfile = "start at" in read_node['frame_mode'].value() - - add_retime = [ - key for key in read_node.knobs().keys() - if "addRetime" in key - ] - - repre_id = repre_entity["id"] - - # colorspace profile - colorspace = ( - repre_entity["data"].get("colorspace") - or version_attributes.get("colorSpace") - ) - - self.handle_start = version_attributes.get("handleStart", 0) - self.handle_end = version_attributes.get("handleEnd", 0) - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - first -= self.handle_start - last += self.handle_end - - if not is_sequence: - duration = last - first - first = 1 - last = first + duration - - if not filepath: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - read_node["file"].setValue(filepath) - - # to avoid multiple undo steps for rest of process - # we will switch off undo-ing - with viewer_update_and_undo_stop(): - if read_node.Class() == "Read": - self.set_colorspace_to_node( - read_node, - filepath, - project_name, - version_entity, - repre_entity - ) - - self._set_range_to_node(read_node, first, last, start_at_workfile) - - updated_dict = { - "representation": repre_entity["id"], - "frameStart": str(first), - "frameEnd": str(last), - "version": str(version_entity["version"]), - "db_colorspace": colorspace, - "source": version_attributes.get("source"), - "handleStart": str(self.handle_start), - "handleEnd": str(self.handle_end), - "fps": str(version_attributes.get("fps")) - } - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - # change color of read_node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x4ecd25ff" - else: - color_value = "0xd84f20ff" - read_node["tile_color"].setValue(int(color_value, 16)) - - # Update the imprinted representation - update_container(read_node, updated_dict) - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - if add_retime and version_data.get("retime"): - self._make_retimes(read_node, version_data) - else: - self.clear_members(read_node) - - self.set_as_member(read_node) - - def set_colorspace_to_node( - self, - read_node, - filepath, - project_name, - version_entity, - repre_entity, - ): - """Set colorspace to read node. - - Sets colorspace with available names validation. - - Args: - read_node (nuke.Node): The nuke's read node - filepath (str): File path. - project_name (str): Project name. - version_entity (dict): Version entity. - repre_entity (dict): Representation entity. 
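# Editor's illustrative sketch (not part of the original plugin): both load()
# and update() above widen the version frame range by the handles and, for
# single-file (movie) representations, rebase the range to start at frame 1.
# Equivalent arithmetic on plain values (entity lookups replaced by function
# arguments):
def clip_range(frame_start, frame_end, handle_start, handle_end, is_sequence):
    first = frame_start - handle_start
    last = frame_end + handle_end
    if not is_sequence:
        duration = last - first
        first = 1
        last = first + duration
    return first, last

# Example: clip_range(1001, 1100, 10, 10, is_sequence=False) -> (1, 120)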
- - """ - used_colorspace = self._get_colorspace_data( - project_name, version_entity, repre_entity, filepath - ) - if ( - used_colorspace - and colorspace_exists_on_node(read_node, used_colorspace) - ): - self.log.info(f"Used colorspace: {used_colorspace}") - read_node["colorspace"].setValue(used_colorspace) - else: - self.log.info("Colorspace not set...") - - def remove(self, container): - read_node = container["node"] - assert read_node.Class() == "Read", "Must be Read" - - with viewer_update_and_undo_stop(): - members = self.get_members(read_node) - nuke.delete(read_node) - for member in members: - nuke.delete(member) - - def _set_range_to_node( - self, read_node, first, last, start_at_workfile, slate_frames=0 - ): - read_node['origfirst'].setValue(int(first)) - read_node['first'].setValue(int(first)) - read_node['origlast'].setValue(int(last)) - read_node['last'].setValue(int(last)) - - # set start frame depending on workfile or version - if start_at_workfile: - read_node['frame_mode'].setValue("start at") - - start_frame = self.script_start - slate_frames - - read_node['frame'].setValue(str(start_frame)) - - def _make_retimes(self, parent_node, version_data): - ''' Create all retime and timewarping nodes with copied animation ''' - speed = version_data.get('speed', 1) - time_warp_nodes = version_data.get('timewarps', []) - last_node = None - source_id = self.get_container_id(parent_node) - self.log.debug("__ source_id: {}".format(source_id)) - self.log.debug("__ members: {}".format( - self.get_members(parent_node))) - - dependent_nodes = self.clear_members(parent_node) - - with maintained_selection(): - parent_node['selected'].setValue(True) - - if speed != 1: - rtn = nuke.createNode( - "Retime", - "speed {}".format(speed)) - - rtn["before"].setValue("continue") - rtn["after"].setValue("continue") - rtn["input.first_lock"].setValue(True) - rtn["input.first"].setValue( - self.script_start - ) - self.set_as_member(rtn) - last_node = rtn - - if time_warp_nodes != []: - start_anim = self.script_start + (self.handle_start / speed) - for timewarp in time_warp_nodes: - twn = nuke.createNode( - timewarp["Class"], - "name {}".format(timewarp["name"]) - ) - if isinstance(timewarp["lookup"], list): - # if array for animation - twn["lookup"].setAnimated() - for i, value in enumerate(timewarp["lookup"]): - twn["lookup"].setValueAt( - (start_anim + i) + value, - (start_anim + i)) - else: - # if static value `int` - twn["lookup"].setValue(timewarp["lookup"]) - - self.set_as_member(twn) - last_node = twn - - if dependent_nodes: - # connect to original inputs - for i, n in enumerate(dependent_nodes): - last_node.setInput(i, n) - - def _get_node_name(self, context): - folder_entity = context["folder"] - product_name = context["product"]["name"] - repre_entity = context["representation"] - - folder_name = folder_entity["name"] - repre_cont = repre_entity["context"] - name_data = { - "folder": { - "name": folder_name, - }, - "product": { - "name": product_name, - }, - "asset": folder_name, - "subset": product_name, - "representation": repre_entity["name"], - "ext": repre_cont["representation"], - "id": repre_entity["id"], - "class_name": self.__class__.__name__ - } - - return self.node_name_template.format(**name_data) - - def _get_colorspace_data( - self, project_name, version_entity, repre_entity, filepath - ): - """Get colorspace data from version and representation documents - - Args: - project_name (str): Project name. - version_entity (dict): Version entity. 
- repre_entity (dict): Representation entity. - filepath (str): File path. - - Returns: - Any[str,None]: colorspace name or None - """ - # Get backward compatible colorspace key. - colorspace = repre_entity["data"].get("colorspace") - self.log.debug( - f"Colorspace from representation colorspace: {colorspace}" - ) - - # Get backward compatible version data key if colorspace is not found. - if not colorspace: - colorspace = version_entity["attrib"].get("colorSpace") - self.log.debug( - f"Colorspace from version colorspace: {colorspace}" - ) - - # Get colorspace from representation colorspaceData if colorspace is - # not found. - if not colorspace: - colorspace_data = repre_entity["data"].get("colorspaceData", {}) - colorspace = colorspace_data.get("colorspace") - self.log.debug( - f"Colorspace from representation colorspaceData: {colorspace}" - ) - - config_data = get_current_context_imageio_config_preset() - # check if any filerules are not applicable - new_parsed_colorspace = get_imageio_file_rules_colorspace_from_filepath( # noqa - filepath, "nuke", project_name, config_data=config_data - ) - self.log.debug(f"Colorspace new filerules: {new_parsed_colorspace}") - - # colorspace from `project_settings/nuke/imageio/regexInputs` - old_parsed_colorspace = get_imageio_input_colorspace(filepath) - self.log.debug(f"Colorspace old filerules: {old_parsed_colorspace}") - - return ( - new_parsed_colorspace - or old_parsed_colorspace - or colorspace - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py deleted file mode 100644 index e923a02424..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py +++ /dev/null @@ -1,361 +0,0 @@ -import json -from collections import OrderedDict -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadEffects(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"effect"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load Effects - nodes" - order = 0 - icon = "cc" - color = "white" - ignore_attr = ["useLifetime"] - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - 
file = self.filepath_from_context(context).replace("\\", "/") - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - GN = nuke.createNode( - "Group", - "name {}_1".format(object_name), - inpanel=False - ) - - # adding content to the group node - with GN: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for ef_name, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to find parent read node - self.connect_read_node(GN, namespace, json_f["assignTo"]) - - GN["tile_color"].setValue(int("0x3469ffff", 16)) - - self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) - - return containerise( - node=GN, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
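# Editor's illustrative sketch (not part of the original plugin): the effect
# loaders treat any knob value that is a list longer than four entries as
# baked animation and key it frame by frame, offset by the workfile start
# frame (nested lists become one key per channel). A plain-Python version
# that returns the (frame, channel, value) keys instead of calling
# knob.setValueAt:
def expand_knob_animation(value, workfile_first_frame):
    if not (isinstance(value, list) and len(value) > 4):
        return None  # static value, would be set with knob.setValue()
    keys = []
    for i, frame_value in enumerate(value):
        frame = workfile_first_frame + i
        if isinstance(frame_value, list):
            for channel, channel_value in enumerate(frame_value):
                keys.append((frame, channel, channel_value))
        else:
            keys.append((frame, 0, frame_value))
    return keys

# Example: expand_knob_animation([0.0, 0.1, 0.2, 0.3, 0.4], 1001)
# -> [(1001, 0, 0.0), (1002, 0, 0.1), (1003, 0, 0.2),
#     (1004, 0, 0.3), (1005, 0, 0.4)]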
These automatic changes are to its - inputs: - - """ - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = container["namespace"] - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps", - ]: - data_imprint[k] = version_attributes[k] - - # Update the imprinted representation - update_container( - GN, - data_imprint - ) - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # adding content to the group node - with GN: - # first remove all nodes - [nuke.delete(n) for n in nuke.allNodes()] - - # create input node - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - # create output node - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to find parent read node - self.connect_read_node(GN, namespace, json_f["assignTo"]) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x3469ffff" - else: - color_value = "0xd84f20ff" - - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - def connect_read_node(self, group_node, namespace, product_name): - """ - Finds read node and selects it - - Arguments: - namespace (str): namespace name - - Returns: - nuke node: node is selected - None: if nothing found - """ - search_name = "{0}_{1}".format(namespace, product_name) - - node = [ - n for n in nuke.allNodes(filter="Read") - if search_name in n["file"].value() - ] - if len(node) > 0: - rn = node[0] - else: - rn = None - - # Parent read node has been found - # solving connections - if rn: - dep_nodes = rn.dependent() - - if len(dep_nodes) > 0: - for dn in dep_nodes: - dn.setInput(0, group_node) - - group_node.setInput(0, rn) - 
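# Editor's illustrative sketch (not part of the original plugin):
# reorder_nodes() below walks every (trackIndex, subTrackIndex) pair in
# ascending order so the effect nodes are chained in the same order they sat
# on the timeline. A standalone version of the same grouping; it uses a
# simple sort instead of the nested range loops, and skips the original
# `item is not {}` identity check, which is always true for a freshly built
# dict:
from collections import OrderedDict

def reorder_effects(data):
    items = {k: v for k, v in data.items() if isinstance(v, dict)}
    ordered = OrderedDict()
    for key, value in sorted(
        items.items(),
        key=lambda kv: (kv[1]["trackIndex"], kv[1]["subTrackIndex"]),
    ):
        ordered[key] = value
    return ordered

# Example:
# reorder_effects({
#     "grade": {"trackIndex": 1, "subTrackIndex": 0, "class": "Grade"},
#     "lut":   {"trackIndex": 0, "subTrackIndex": 1,
#               "class": "OCIOFileTransform"},
#     "assignTo": "sh010_plate",
# })
# -> keys in order: "lut", "grade"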
group_node.autoplace() - - def reorder_nodes(self, data): - new_order = OrderedDict() - trackNums = [v["trackIndex"] for k, v in data.items() - if isinstance(v, dict)] - subTrackNums = [v["subTrackIndex"] for k, v in data.items() - if isinstance(v, dict)] - - for trackIndex in range( - min(trackNums), max(trackNums) + 1): - for subTrackIndex in range( - min(subTrackNums), max(subTrackNums) + 1): - item = self.get_item(data, trackIndex, subTrackIndex) - if item is not {}: - new_order.update(item) - return new_order - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if isinstance(val, dict) - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py deleted file mode 100644 index ce7e7debeb..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py +++ /dev/null @@ -1,372 +0,0 @@ -import json -from collections import OrderedDict -import six -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import lib -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadEffectsInputProcess(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"effect"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load Effects - Input Process" - order = 0 - icon = "eye" - color = "#cc0000" - ignore_attr = ["useLifetime"] - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting 
file path - file = self.filepath_from_context(context).replace("\\", "/") - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - GN = nuke.createNode( - "Group", - "name {}_1".format(object_name), - inpanel=False - ) - - # adding content to the group node - with GN: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - - node.setInput(0, pre_node) - pre_node = node - - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to place it under Viewer1 - if not self.connect_active_viewer(GN): - nuke.delete(GN) - return - - GN["tile_color"].setValue(int("0x3469ffff", 16)) - - self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) - - return containerise( - node=GN, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # Update the imprinted representation - update_container( - GN, - data_imprint - ) - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # adding content to the group node - with GN: - # first remove all nodes - [nuke.delete(n) for n in nuke.allNodes()] - - # create input node - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - # create output node - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x3469ffff" - else: - color_value = "0xd84f20ff" - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info("updated to version: {}".format(version_entity["name"])) - - def connect_active_viewer(self, group_node): - """ - Finds Active viewer and - place the node under it, also adds - name of group into Input Process of the viewer - - Arguments: - group_node (nuke node): nuke group node object - - """ - group_node_name = group_node["name"].value() - - viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] - if len(viewer) > 0: - viewer = viewer[0] - else: - msg = str("Please create Viewer node before you " - "run this action again") - self.log.error(msg) - nuke.message(msg) - return None - - # get coordinates of Viewer1 - xpos = viewer["xpos"].value() - ypos = viewer["ypos"].value() - - ypos += 150 - - viewer["ypos"].setValue(ypos) - - # set coordinates to group node - group_node["xpos"].setValue(xpos) - 
group_node["ypos"].setValue(ypos + 50) - - # add group node name to Viewer Input Process - viewer["input_process_node"].setValue(group_node_name) - - # put backdrop under - lib.create_backdrop( - label="Input Process", - layer=2, - nodes=[viewer, group_node], - color="0x7c7faaff") - - return True - - def reorder_nodes(self, data): - new_order = OrderedDict() - trackNums = [v["trackIndex"] for k, v in data.items() - if isinstance(v, dict)] - subTrackNums = [v["subTrackIndex"] for k, v in data.items() - if isinstance(v, dict)] - - for trackIndex in range( - min(trackNums), max(trackNums) + 1): - for subTrackIndex in range( - min(subTrackNums), max(subTrackNums) + 1): - item = self.get_item(data, trackIndex, subTrackIndex) - if item is not {}: - new_order.update(item) - return new_order - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if isinstance(val, dict) - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py deleted file mode 100644 index 1c91af0c1c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py +++ /dev/null @@ -1,190 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - maintained_selection, - get_avalon_knob_data, - set_avalon_knob_data, - swap_node_with_dependency, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadGizmo(load.LoaderPlugin): - """Loading nuke Gizmo""" - - product_types = {"gizmo"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Gizmo" - order = 0 - icon = "dropbox" - color = "white" - node_color = "0x75338eff" - - def load(self, context, name, namespace, data): - """ - Loading function to get Gizmo into node graph - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerized nuke node object - """ - - # get main variables - version_entity = context["version"] - version_attributes = version_entity["attrib"] - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - # add attributes from the version to imprint to metadata knob - for k in [ - 
"frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - # add group from nk - nuke.nodePaste(file) - - group_node = nuke.selectedNode() - - group_node["name"].setValue(object_name) - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - version_attributes = version_entity["attrib"] - - # get corresponding node - group_node = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # capture pipeline metadata - avalon_data = get_avalon_knob_data(group_node) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection([group_node]): - # insert nuke script to the script - nuke.nodePaste(file) - # convert imported to selected node - new_group_node = nuke.selectedNode() - # swap nodes with maintained connections - with swap_node_with_dependency( - group_node, new_group_node) as node_name: - new_group_node["name"].setValue(node_name) - # set updated pipeline metadata - set_avalon_knob_data(new_group_node, avalon_data) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - - new_group_node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["name"]) - ) - - return update_container(new_group_node, data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py deleted file mode 100644 index 36e878fdf1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py +++ /dev/null @@ -1,270 +0,0 @@ -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - maintained_selection, - create_backdrop, - get_avalon_knob_data, - set_avalon_knob_data, - swap_node_with_dependency, -) 
-from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadGizmoInputProcess(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"gizmo"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Gizmo - Input Process" - order = 0 - icon = "eye" - color = "#cc0000" - node_color = "0x7533c1ff" - - def load(self, context, name, namespace, data): - """ - Loading function to get Gizmo as Input Process on viewer - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerized nuke node object - """ - - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - # add additional metadata from the version to imprint to metadata knob - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - # add group from nk - nuke.nodePaste(file) - - group_node = nuke.selectedNode() - - group_node["name"].setValue(object_name) - - # try to place it under Viewer1 - if not self.connect_active_viewer(group_node): - nuke.delete(group_node) - return - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - group_node = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # capture pipeline metadata - avalon_data = get_avalon_knob_data(group_node) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection([group_node]): - # insert nuke script to the script - nuke.nodePaste(file) - # convert imported to selected node - new_group_node = nuke.selectedNode() - # swap nodes with maintained connections - with swap_node_with_dependency( - group_node, new_group_node) as node_name: - new_group_node["name"].setValue(node_name) - # set updated pipeline metadata - set_avalon_knob_data(new_group_node, avalon_data) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - new_group_node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(new_group_node, data_imprint) - - def connect_active_viewer(self, group_node): - """ - Finds Active viewer and - place the node under it, also adds - name of group into Input Process of the viewer - - Arguments: - group_node (nuke node): nuke group node object - - """ - group_node_name = group_node["name"].value() - - viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] - if len(viewer) > 0: - viewer = viewer[0] - else: - msg = str("Please create Viewer node before you " - "run this action again") - self.log.error(msg) - nuke.message(msg) - return None - - # get coordinates of Viewer1 - xpos = viewer["xpos"].value() - ypos = viewer["ypos"].value() - - ypos += 150 - - viewer["ypos"].setValue(ypos) - - # set coordinates to group node - group_node["xpos"].setValue(xpos) - group_node["ypos"].setValue(ypos + 50) - - # add group node name to Viewer Input Process - viewer["input_process_node"].setValue(group_node_name) - - # put backdrop under - create_backdrop( - label="Input Process", - layer=2, - nodes=[viewer, group_node], - color="0x7c7faaff" - ) - - return True - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - 
elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py deleted file mode 100644 index 0c43f5a5ca..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py +++ /dev/null @@ -1,254 +0,0 @@ -import nuke - -import qargparse -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - get_imageio_input_colorspace -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) -from ayon_core.lib.transcoding import ( - IMAGE_EXTENSIONS -) - - -class LoadImage(load.LoaderPlugin): - """Load still image into Nuke""" - - product_types = { - "render2d", - "source", - "plate", - "render", - "prerender", - "review", - "image", - } - representations = {"*"} - extensions = set(ext.lstrip(".") for ext in IMAGE_EXTENSIONS) - - settings_category = "nuke" - - label = "Load Image" - order = -10 - icon = "image" - color = "white" - - # Loaded from settings - representations_include = [] - - node_name_template = "{class_name}_{ext}" - - options = [ - qargparse.Integer( - "frame_number", - label="Frame Number", - default=int(nuke.root()["first_frame"].getValue()), - min=1, - max=999999, - help="What frame is reading from?" - ) - ] - - @classmethod - def get_representations(cls): - return cls.representations_include or cls.representations - - def load(self, context, name, namespace, options): - self.log.info("__ options: `{}`".format(options)) - frame_number = options.get( - "frame_number", int(nuke.root()["first_frame"].getValue()) - ) - - version_entity = context["version"] - version_attributes = version_entity["attrib"] - repre_entity = context["representation"] - repre_id = repre_entity["id"] - - self.log.debug( - "Representation id `{}` ".format(repre_id)) - - last = first = int(frame_number) - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - file = self.filepath_from_context(context) - - if not file: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - file = file.replace("\\", "/") - - frame = repre_entity["context"].get("frame") - if frame: - padding = len(frame) - file = file.replace( - frame, - format(frame_number, "0{}".format(padding))) - - read_name = self._get_node_name(context) - - # Create the Loader with the filename path set - with viewer_update_and_undo_stop(): - r = nuke.createNode( - "Read", - "name {}".format(read_name), - inpanel=False - ) - - r["file"].setValue(file) - - # Set colorspace defined in version data - colorspace = version_entity["attrib"].get("colorSpace") - if colorspace: - r["colorspace"].setValue(str(colorspace)) - - preset_clrsp = get_imageio_input_colorspace(file) - - if preset_clrsp is not None: - r["colorspace"].setValue(preset_clrsp) - - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - - # add attributes from the version to imprint metadata knob - colorspace = version_attributes["colorSpace"] - data_imprint = { - "frameStart": 
first, - "frameEnd": last, - "version": version_entity["version"], - "colorspace": colorspace, - } - for k in ["source", "fps"]: - data_imprint[k] = version_attributes.get(k, str(None)) - - r["tile_color"].setValue(int("0x4ecd25ff", 16)) - - return containerise(r, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - node = container["node"] - frame_number = node["first"].value() - - assert node.Class() == "Read", "Must be Read" - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - repr_cont = repre_entity["context"] - - file = get_representation_path(repre_entity) - - if not file: - repre_id = repre_entity["id"] - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - file = file.replace("\\", "/") - - frame = repr_cont.get("frame") - if frame: - padding = len(frame) - file = file.replace( - frame, - format(frame_number, "0{}".format(padding))) - - # Get start frame from version data - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - last = first = int(frame_number) - - # Set the global in to the start frame of the sequence - node["file"].setValue(file) - node["origfirst"].setValue(first) - node["first"].setValue(first) - node["origlast"].setValue(last) - node["last"].setValue(last) - - version_attributes = version_entity["attrib"] - updated_dict = { - "representation": repre_entity["id"], - "frameStart": str(first), - "frameEnd": str(last), - "version": str(version_entity["version"]), - "colorspace": version_attributes.get("colorSpace"), - "source": version_attributes.get("source"), - "fps": str(version_attributes.get("fps")), - } - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x4ecd25ff" - else: - color_value = "0xd84f20ff" - node["tile_color"].setValue(int(color_value, 16)) - - # Update the imprinted representation - update_container(node, updated_dict) - self.log.info("updated to version: {}".format( - version_entity["version"] - )) - - def remove(self, container): - node = container["node"] - assert node.Class() == "Read", "Must be Read" - - with viewer_update_and_undo_stop(): - nuke.delete(node) - - def _get_node_name(self, context): - folder_entity = context["folder"] - product_name = context["product"]["name"] - repre_entity = context["representation"] - - folder_name = folder_entity["name"] - repre_cont = repre_entity["context"] - name_data = { - "folder": { - "name": folder_name, - }, - "product": { - "name": product_name, - }, - "asset": folder_name, - "subset": product_name, - "representation": repre_entity["name"], - "ext": repre_cont["representation"], - "id": repre_entity["id"], - "class_name": self.__class__.__name__ - } - - return self.node_name_template.format(**name_data) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py deleted file mode 100644 index c1b5a24504..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py +++ /dev/null @@ -1,32 +0,0 @@ 
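# Editor's illustrative sketch (not part of the original plugin): the
# _get_node_name helpers in LoadClip and LoadImage above expand a template
# such as "{class_name}_{ext}" against a small dict built from the folder,
# product and representation entities, which is how loaded Read nodes get
# predictable names. A simplified expansion (parameter names are stand-ins):
def format_node_name(template, folder_name, product_name,
                     representation_name, ext, class_name):
    name_data = {
        "folder": {"name": folder_name},
        "product": {"name": product_name},
        "asset": folder_name,       # backward-compatible aliases
        "subset": product_name,
        "representation": representation_name,
        "ext": ext,
        "class_name": class_name,
    }
    return template.format(**name_data)

# Example:
# format_node_name("{class_name}_{ext}", "sh010", "plateMain", "exr", "exr",
#                  "LoadClip")
# -> "LoadClip_exr"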
-import nuke -from ayon_core.pipeline import load - - -class MatchmoveLoader(load.LoaderPlugin): - """ - This will run matchmove script to create track in script. - """ - - product_types = {"matchmove"} - representations = {"*"} - extensions = {"py"} - - settings_category = "nuke" - - defaults = ["Camera", "Object"] - - label = "Run matchmove script" - icon = "empire" - color = "orange" - - def load(self, context, name, namespace, data): - path = self.filepath_from_context(context) - if path.lower().endswith(".py"): - exec(open(path).read()) - - else: - msg = "Unsupported script type" - self.log.error(msg) - nuke.message(msg) - - return True diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py deleted file mode 100644 index 551147be96..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py +++ /dev/null @@ -1,207 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import maintained_selection -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class AlembicModelLoader(load.LoaderPlugin): - """ - This will load alembic model or anim into script. - """ - - product_types = {"model", "pointcache", "animation"} - representations = {"*"} - extensions = {"abc"} - - settings_category = "nuke" - - label = "Load Alembic" - icon = "cube" - color = "orange" - node_color = "0x4ecd91ff" - - def load(self, context, name, namespace, data): - # get main variables - project_name = context["project"]["name"] - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - with maintained_selection(): - model_node = nuke.createNode( - "ReadGeo2", - "name {} file {} ".format( - object_name, file), - inpanel=False - ) - - model_node.forceValidate() - - # Ensure all items are imported and selected. - scene_view = model_node.knob('scene_view') - scene_view.setImportedItems(scene_view.getAllItems()) - scene_view.setSelectedItems(scene_view.getAllItems()) - - model_node["frame_rate"].setValue(float(fps)) - - # workaround because nuke's bug is not adding - # animation keys properly - xpos = model_node.xpos() - ypos = model_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(model_node) - nuke.nodePaste("%clipboard%") - model_node = nuke.toNode(object_name) - model_node.setXYpos(xpos, ypos) - - # color node by correct color by actual version - self.node_version_color(project_name, version_entity, model_node) - - return containerise( - node=model_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """ - Called by Scene Inventory when look should be updated to current - version. - If any reference edits cannot be applied, eg. 
shader renamed and - material not present, reference is unloaded and cleaned. - All failed edits are highlighted to the user via message box. - - Args: - container: object that has look to be updated - context: (dict): relationship data to get proper - representation from DB and persisted - data in .json - Returns: - None - """ - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - model_node = container["node"] - - # get main variables - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - # prepare data for imprinting - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - - # add additional metadata from the version to imprint to Avalon knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = get_representation_path(repre_entity).replace("\\", "/") - - with maintained_selection(): - model_node['selected'].setValue(True) - - # collect input output dependencies - dependencies = model_node.dependencies() - dependent = model_node.dependent() - - model_node["frame_rate"].setValue(float(fps)) - model_node["file"].setValue(file) - - # Ensure all items are imported and selected. - scene_view = model_node.knob('scene_view') - scene_view.setImportedItems(scene_view.getAllItems()) - scene_view.setSelectedItems(scene_view.getAllItems()) - - # workaround because nuke's bug is - # not adding animation keys properly - xpos = model_node.xpos() - ypos = model_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(model_node) - - # paste the node back and set the position - nuke.nodePaste("%clipboard%") - model_node = nuke.selectedNode() - model_node.setXYpos(xpos, ypos) - - # link to original input nodes - for i, input in enumerate(dependencies): - model_node.setInput(i, input) - # link to original output nodes - for d in dependent: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if model_node is dpcy), 0) - d.setInput(index, model_node) - - # color node by correct color by actual version - self.node_version_color(project_name, version_entity, model_node) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(model_node, data_imprint) - - def node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version""" - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - node["tile_color"].setValue(int(color_value, 16)) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = nuke.toNode(container['objectName']) - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py deleted file mode 100644 index bdff8d7e28..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py +++ /dev/null @@ -1,349 +0,0 @@ -import os 
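# Editor's illustrative sketch (not part of the original plugin): after the
# copy/paste workaround above, the camera and model loaders re-wire every
# downstream node by locating the input slot that should receive the
# container node, defaulting to the first input when no match is found. The
# index lookup in isolation, over a plain list of inputs:
def input_index_of(dependency_inputs, node):
    return next(
        (i for i, inp in enumerate(dependency_inputs) if inp is node),
        0,  # fall back to input 0, as the original loaders do
    )

# Example:
# cam = object(); other = object()
# input_index_of([other, cam, None], cam) -> 1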
-import json -import secrets - -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - viewer_update_and_undo_stop, - update_container, -) - - -class LoadOcioLookNodes(load.LoaderPlugin): - """Loading Ocio look to the nuke.Node graph""" - - product_types = {"ociolook"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load OcioLook [nodes]" - order = 0 - icon = "cc" - color = "white" - ignore_attr = ["useLifetime"] - - # plugin attributes - current_node_color = "0x4ecd91ff" - old_node_color = "0xd88467ff" - - # json file variables - schema_version = 1 - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke.Node: containerized nuke.Node object - """ - namespace = namespace or context["folder"]["name"] - suffix = secrets.token_hex(nbytes=4) - node_name = "{}_{}_{}".format( - name, namespace, suffix) - - # getting file path - filepath = self.filepath_from_context(context) - - json_f = self._load_json_data(filepath) - - group_node = self._create_group_node( - filepath, json_f["data"]) - # renaming group node - group_node["name"].setValue(node_name) - - self._node_version_color( - context["project"]["name"], - context["version"], - group_node - ) - - self.log.info( - "Loaded lut setup: `{}`".format(group_node["name"].value())) - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__ - ) - - def _create_group_node( - self, - filepath, - data, - group_node=None - ): - """Creates group node with all the nodes inside. - - Creating mainly `OCIOFileTransform` nodes with `OCIOColorSpace` nodes - in between - in case those are needed. 
- - Arguments: - filepath (str): path to json file - data (dict): data from json file - group_node (Optional[nuke.Node]): group node or None - - Returns: - nuke.Node: group node with all the nodes inside - """ - # get corresponding node - - root_working_colorspace = nuke.root()["workingSpaceLUT"].value() - - dir_path = os.path.dirname(filepath) - all_files = os.listdir(dir_path) - - ocio_working_colorspace = _colorspace_name_by_type( - data["ocioLookWorkingSpace"]) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - input_node = None - output_node = None - if group_node: - # remove all nodes between Input and Output nodes - for node in group_node.nodes(): - if node.Class() not in ["Input", "Output"]: - nuke.delete(node) - elif node.Class() == "Input": - input_node = node - elif node.Class() == "Output": - output_node = node - else: - group_node = nuke.createNode( - "Group", - inpanel=False - ) - - # adding content to the group node - with group_node: - pre_colorspace = root_working_colorspace - - # reusing input node if it exists during update - if input_node: - pre_node = input_node - else: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - # Compare script working colorspace with ocio working colorspace - # found in json file and convert to json's if needed - if pre_colorspace != ocio_working_colorspace: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - ocio_working_colorspace - ) - pre_colorspace = ocio_working_colorspace - - for ocio_item in data["ocioLookItems"]: - input_space = _colorspace_name_by_type( - ocio_item["input_colorspace"]) - output_space = _colorspace_name_by_type( - ocio_item["output_colorspace"]) - - # making sure we are set to correct colorspace for otio item - if pre_colorspace != input_space: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - input_space - ) - - node = nuke.createNode("OCIOFileTransform") - - # file path from lut representation - extension = ocio_item["ext"] - item_name = ocio_item["name"] - - item_lut_file = next( - ( - file for file in all_files - if file.endswith(extension) - ), - None - ) - if not item_lut_file: - raise ValueError( - "File with extension '{}' not " - "found in directory".format(extension) - ) - - item_lut_path = os.path.join( - dir_path, item_lut_file).replace("\\", "/") - node["file"].setValue(item_lut_path) - node["name"].setValue(item_name) - node["direction"].setValue(ocio_item["direction"]) - node["interpolation"].setValue(ocio_item["interpolation"]) - node["working_space"].setValue(input_space) - - pre_node.autoplace() - node.setInput(0, pre_node) - node.autoplace() - # pass output space into pre_colorspace for next iteration - # or for output node comparison - pre_colorspace = output_space - pre_node = node - - # making sure we are back in script working colorspace - if pre_colorspace != root_working_colorspace: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - root_working_colorspace - ) - - # reusing output node if it exists during update - if not output_node: - output = nuke.createNode("Output") - else: - output = output_node - - output.setInput(0, pre_node) - - return group_node - - def update(self, container, context): - repre_entity = context["representation"] - - group_node = container["node"] - - filepath = get_representation_path(repre_entity) - - json_f = self._load_json_data(filepath) - - group_node = self._create_group_node( - filepath, - json_f["data"], - 
group_node - ) - - self._node_version_color( - context["project"]["name"], context["version"], group_node - ) - - self.log.info("Updated lut setup: `{}`".format( - group_node["name"].value())) - - return update_container( - group_node, {"representation": repre_entity["id"]}) - - def _load_json_data(self, filepath): - # getting data from json file with unicode conversion - with open(filepath, "r") as _file: - json_f = {self._bytify(key): self._bytify(value) - for key, value in json.load(_file).items()} - - # check if the version in json_f is the same as plugin version - if json_f["version"] != self.schema_version: - raise KeyError( - "Version of json file is not the same as plugin version") - - return json_f - - def _bytify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self._bytify(key): self._bytify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self._bytify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = nuke.toNode(container['objectName']) - with viewer_update_and_undo_stop(): - nuke.delete(node) - - def _node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version""" - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.current_node_color - else: - color_value = self.old_node_color - node["tile_color"].setValue(int(color_value, 16)) - - -def _colorspace_name_by_type(colorspace_data): - """ - Returns colorspace name by type - - Arguments: - colorspace_data (dict): colorspace data - - Returns: - str: colorspace name - """ - if colorspace_data["type"] == "colorspaces": - return colorspace_data["name"] - elif colorspace_data["type"] == "roles": - return colorspace_data["colorspace"] - else: - raise KeyError("Unknown colorspace type: {}".format( - colorspace_data["type"])) - - -def _add_ocio_colorspace_node(pre_node, input_space, output_space): - """ - Adds OCIOColorSpace node to the node graph - - Arguments: - pre_node (nuke.Node): node to connect to - input_space (str): input colorspace - output_space (str): output colorspace - - Returns: - nuke.Node: node with OCIOColorSpace node - """ - node = nuke.createNode("OCIOColorSpace") - node.setInput(0, pre_node) - node["in_colorspace"].setValue(input_space) - node["out_colorspace"].setValue(output_space) - - pre_node.autoplace() - node.setInput(0, pre_node) - node.autoplace() - - return node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py deleted file mode 100644 index cf543dabfd..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py +++ /dev/null @@ -1,162 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import get_avalon_knob_data -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LinkAsGroup(load.LoaderPlugin): - """Copy the 
published file to be pasted at the desired location""" - - product_types = {"workfile", "nukenodes"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Precomp" - order = 0 - icon = "file" - color = "#cc0000" - - def load(self, context, name, namespace, data): - # for k, v in context.items(): - # log.info("key: `{}`, value: {}\n".format(k, v)) - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - file = self.filepath_from_context(context).replace("\\", "/") - self.log.info("file: {}\n".format(file)) - - data_imprint = { - "startingFrame": first, - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # group context is set to precomp, so back up one level. - nuke.endGroup() - - # P = nuke.nodes.LiveGroup("file {}".format(file)) - P = nuke.createNode( - "Precomp", - "file {}".format(file), - inpanel=False - ) - - # Set colorspace defined in version data - self.log.info("colorspace: {}\n".format(colorspace)) - - P["name"].setValue("{}_{}".format(name, namespace)) - P["useOutput"].setValue(True) - - with P: - # iterate through all nodes in group node and find pype writes - writes = [n.name() for n in nuke.allNodes() - if n.Class() == "Group" - if get_avalon_knob_data(n)] - - if writes: - # create panel for selecting output - panel_choices = " ".join(writes) - panel_label = "Select write node for output" - p = nuke.Panel("Select Write Node") - p.addEnumerationPulldown( - panel_label, panel_choices) - p.show() - P["output"].setValue(p.value(panel_label)) - - P["tile_color"].setValue(0xff0ff0ff) - - return containerise( - node=P, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - node = container["node"] - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - root = get_representation_path(repre_entity).replace("\\", "/") - - # Get start frame from version data - - version_attributes = version_entity["attrib"] - updated_dict = { - "representation": repre_entity["id"], - "frameEnd": version_attributes.get("frameEnd"), - "version": version_entity["version"], - "colorspace": version_attributes.get("colorSpace"), - "source": version_attributes.get("source"), - "fps": version_attributes.get("fps"), - } - - # Update the imprinted representation - update_container( - node, - updated_dict - ) - - node["file"].setValue(root) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0xff0ff0ff" - else: - color_value = "0xd84f20ff" - node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py deleted file mode 100644 index 1471159380..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py +++ /dev/null @@ -1,62 +0,0 @@ -from pprint import pformat -import pyblish.api -from ayon_nuke.api import lib as pnlib -import nuke - - -class CollectBackdrops(pyblish.api.InstancePlugin): - """Collect Backdrop node instance and its content - """ - - order = pyblish.api.CollectorOrder + 0.22 - label = "Collect Backdrop" - hosts = ["nuke"] - families = ["nukenodes"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug(pformat(instance.data)) - - bckn = instance.data["transientData"]["node"] - - # define size of the backdrop - left = bckn.xpos() - top = bckn.ypos() - right = left + bckn['bdwidth'].value() - bottom = top + bckn['bdheight'].value() - - instance.data["transientData"]["childNodes"] = [] - # iterate all nodes - for node in nuke.allNodes(): - - # exclude viewer - if node.Class() == "Viewer": - continue - - # find all related nodes - if (node.xpos() > left) \ - and (node.xpos() + node.screenWidth() < right) \ - and (node.ypos() > top) \ - and (node.ypos() + node.screenHeight() < bottom): - - # add contained nodes to instance's node list - instance.data["transientData"]["childNodes"].append(node) - - # get all connections from outside of backdrop - nodes = instance.data["transientData"]["childNodes"] - connections_in, connections_out = pnlib.get_dependent_nodes(nodes) - instance.data["transientData"]["nodeConnectionsIn"] = connections_in - instance.data["transientData"]["nodeConnectionsOut"] = connections_out - - # make label nicer - instance.data["label"] = "{0} ({1} nodes)".format( - bckn.name(), len(instance.data["transientData"]["childNodes"])) - - # get version - version = instance.context.data.get('version') - - if version: - instance.data['version'] = version - - self.log.debug("Backdrop instance collected: `{}`".format(instance)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py 
b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py deleted file mode 100644 index 33c8e63e82..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py +++ /dev/null @@ -1,69 +0,0 @@ -import os -import nuke -import pyblish.api -from ayon_core.lib import get_version_from_path -import ayon_nuke.api as napi -from ayon_core.pipeline import KnownPublishError - - -class CollectContextData(pyblish.api.ContextPlugin): - """Collect current context publish.""" - - order = pyblish.api.CollectorOrder - 0.499 - label = "Collect context data" - hosts = ['nuke'] - - settings_category = "nuke" - - def process(self, context): # sourcery skip: avoid-builtin-shadow - root_node = nuke.root() - - current_file = os.path.normpath(root_node.name()) - - if current_file.lower() == "root": - raise KnownPublishError( - "Workfile is not correct file name. \n" - "Use workfile tool to manage the name correctly." - ) - - # Get frame range - first_frame = int(root_node["first_frame"].getValue()) - last_frame = int(root_node["last_frame"].getValue()) - - # get instance data from root - root_instance_context = napi.get_node_data( - root_node, napi.INSTANCE_DATA_KNOB - ) - - handle_start = root_instance_context["handleStart"] - handle_end = root_instance_context["handleEnd"] - - # Get format - format = root_node['format'].value() - resolution_width = format.width() - resolution_height = format.height() - pixel_aspect = format.pixelAspect() - - script_data = { - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect, - - "handleStart": handle_start, - "handleEnd": handle_end, - "step": 1, - "fps": root_node['fps'].value(), - - "currentFile": current_file, - "version": int(get_version_from_path(current_file)), - - "host": pyblish.api.current_host(), - "hostVersion": nuke.NUKE_VERSION_STRING - } - - context.data["scriptData"] = script_data - context.data.update(script_data) - - self.log.debug('Context from Nuke script collected') diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py deleted file mode 100644 index cd77eab0f1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py +++ /dev/null @@ -1,19 +0,0 @@ -import nuke - -import pyblish.api - - -class CollectFramerate(pyblish.api.ContextPlugin): - """Collect framerate.""" - - order = pyblish.api.CollectorOrder - label = "Collect Framerate" - hosts = [ - "nuke", - "nukeassist" - ] - - settings_category = "nuke" - - def process(self, context): - context.data["fps"] = nuke.root()["fps"].getValue() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py deleted file mode 100644 index ece9823b37..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py +++ /dev/null @@ -1,49 +0,0 @@ -import pyblish.api -import nuke - - -class CollectGizmo(pyblish.api.InstancePlugin): - """Collect Gizmo (group) node instance and its content - """ - - order = pyblish.api.CollectorOrder + 0.22 - label = "Collect Gizmo (group)" - hosts = ["nuke"] - families = ["gizmo"] - - settings_category = "nuke" - - def process(self, instance): - - gizmo_node = instance.data["transientData"]["node"] - - # add product type to familiess - 
instance.data["families"].insert(0, instance.data["productType"]) - # make label nicer - instance.data["label"] = gizmo_node.name() - - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - families = [instance.data["productType"]] + instance.data["families"] - - # Add version data to instance - version_data = { - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "colorspace": nuke.root().knob('workingSpaceLUT').value(), - "families": families, - "productName": instance.data["productName"], - "fps": instance.context.data["fps"] - } - - instance.data.update({ - "versionData": version_data, - "frameStart": first_frame, - "frameEnd": last_frame - }) - self.log.debug("Gizmo instance collected: `{}`".format(instance)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py deleted file mode 100644 index c00b9a8f5d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py +++ /dev/null @@ -1,58 +0,0 @@ -import pyblish.api - -from ayon_core.pipeline.publish import ( - AYONPyblishPluginMixin -) - - -class CollectRenderOnFarm(pyblish.api.ContextPlugin): - """Setup instances for render on farm submission.""" - - # Needs to be after CollectFromCreateContext - order = pyblish.api.CollectorOrder - 0.49 - label = "Collect Render On Farm" - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - if not context.data.get("render_on_farm", False): - return - - for instance in context: - if instance.data["family"] == "workfile": - instance.data["active"] = False - continue - - # Filter out all other instances. - node = instance.data["transientData"]["node"] - if node.name() != instance.context.data["node_name"]: - instance.data["active"] = False - continue - - instance.data["families"].append("render_on_farm") - - # Enable for farm publishing. - instance.data["farm"] = True - - # Skip workfile version incremental save. - instance.context.data["increment_script_version"] = False - - -class SetupRenderOnFarm(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): - """Setup instance for render on farm submission.""" - - order = pyblish.api.CollectorOrder + 0.4999 - label = "Setup Render On Farm" - hosts = ["nuke"] - families = ["render_on_farm"] - - def process(self, instance): - # Clear the families as we only want the main family, ei. no review - # etc. - instance.data["families"] = ["render_on_farm"] - - # Use the workfile instead of published. 
- publish_attributes = instance.data["publish_attributes"] - plugin_attributes = publish_attributes["NukeSubmitDeadline"] - plugin_attributes["use_published_workfile"] = False diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py deleted file mode 100644 index f4266bbbcb..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py +++ /dev/null @@ -1,48 +0,0 @@ -import pyblish.api -import nuke - - -class CollectModel(pyblish.api.InstancePlugin): - """Collect Model node instance and its content - """ - - order = pyblish.api.CollectorOrder + 0.22 - label = "Collect Model" - hosts = ["nuke"] - families = ["model"] - - settings_category = "nuke" - - def process(self, instance): - - geo_node = instance.data["transientData"]["node"] - - # add product type to familiess - instance.data["families"].insert(0, instance.data["productType"]) - # make label nicer - instance.data["label"] = geo_node.name() - - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - families = [instance.data["productType"]] + instance.data["families"] - # Add version data to instance - version_data = { - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "colorspace": nuke.root().knob('workingSpaceLUT').value(), - "families": families, - "productName": instance.data["productName"], - "fps": instance.context.data["fps"] - } - - instance.data.update({ - "versionData": version_data, - "frameStart": first_frame, - "frameEnd": last_frame - }) - self.log.debug("Model instance collected: `{}`".format(instance)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py deleted file mode 100644 index d1392a8460..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py +++ /dev/null @@ -1,57 +0,0 @@ -import nuke -import pyblish.api - - -class CollectInstanceData(pyblish.api.InstancePlugin): - """Collect Nuke instance data - - """ - - order = pyblish.api.CollectorOrder - 0.49 - label = "Collect Nuke Instance Data" - hosts = ["nuke", "nukeassist"] - - settings_category = "nuke" - - # presets - sync_workfile_version_on_families = [] - - def process(self, instance): - product_type = instance.data["productType"] - - # Get format - root = nuke.root() - format_ = root['format'].value() - resolution_width = format_.width() - resolution_height = format_.height() - pixel_aspect = format_.pixelAspect() - - # sync workfile version - if product_type in self.sync_workfile_version_on_families: - self.log.debug( - "Syncing version with workfile for '{}'".format( - product_type - ) - ) - # get version to instance for integration - instance.data['version'] = instance.context.data['version'] - - instance.data.update({ - "step": 1, - "fps": root['fps'].value(), - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect - - }) - - # add creator attributes to instance - creator_attributes = instance.data["creator_attributes"] - instance.data.update(creator_attributes) - - # add review family if review activated on instance - if instance.data.get("review"): - 
instance.data["families"].append("review") - - self.log.debug("Collected instance: {}".format( - instance.data)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py deleted file mode 100644 index 439374e825..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py +++ /dev/null @@ -1,124 +0,0 @@ -import os -import re -import nuke -import pyblish.api - - -class CollectNukeReads(pyblish.api.InstancePlugin): - """Collect all read nodes.""" - - order = pyblish.api.CollectorOrder + 0.04 - label = "Collect Source Reads" - hosts = ["nuke", "nukeassist"] - families = ["source"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug("checking instance: {}".format(instance)) - - node = instance.data["transientData"]["node"] - if node.Class() != "Read": - return - - file_path = node["file"].value() - file_name = os.path.basename(file_path) - items = file_name.split(".") - - if len(items) < 2: - raise ValueError - - ext = items[-1] - - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = node['first'].value() - last_frame = node['last'].value() - - # colorspace - colorspace = node["colorspace"].value() - if "default" in colorspace: - colorspace = colorspace.replace("default (", "").replace(")", "") - - # # Easier way to sequence - Not tested - # isSequence = True - # if first_frame == last_frame: - # isSequence = False - - isSequence = False - if len(items) > 1: - sequence = items[-2] - hash_regex = re.compile(r'([#*])') - seq_regex = re.compile(r'[%0-9*d]') - hash_match = re.match(hash_regex, sequence) - seq_match = re.match(seq_regex, sequence) - if hash_match or seq_match: - isSequence = True - - # get source path - path = nuke.filename(node) - source_dir = os.path.dirname(path) - self.log.debug('source dir: {}'.format(source_dir)) - - if isSequence: - source_files = [f for f in os.listdir(source_dir) - if ext in f - if items[0] in f] - else: - source_files = file_name - - # Include start and end render frame in label - name = node.name() - label = "{0} ({1}-{2})".format( - name, - int(first_frame), - int(last_frame) - ) - - self.log.debug("collected_frames: {}".format(label)) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': ext, - 'ext': ext, - 'files': source_files, - "stagingDir": source_dir, - "frameStart": "%0{}d".format( - len(str(last_frame))) % first_frame - } - instance.data["representations"].append(representation) - - transfer = node["publish"] if "publish" in node.knobs() else False - instance.data['transfer'] = transfer - - # Add version data to instance - version_data = { - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "colorspace": colorspace, - "families": [instance.data["productType"]], - "productName": instance.data["productName"], - "fps": instance.context.data["fps"] - } - - instance.data.update({ - "versionData": version_data, - "path": path, - "stagingDir": source_dir, - "ext": ext, - "label": label, - "frameStart": first_frame, - "frameEnd": last_frame, - "colorspace": colorspace, - "handleStart": handle_start, - "handleEnd": handle_end, - "step": 1, - "fps": int(nuke.root()['fps'].value()) - }) - - self.log.debug("instance.data: {}".format(instance.data)) diff --git 
a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py deleted file mode 100644 index bb3b0083ab..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py +++ /dev/null @@ -1,48 +0,0 @@ -import pyblish.api -import nuke - - -class CollectSlate(pyblish.api.InstancePlugin): - """Check if SLATE node is in scene and connected to rendering tree""" - - order = pyblish.api.CollectorOrder + 0.002 - label = "Collect Slate Node" - hosts = ["nuke"] - families = ["render"] - - settings_category = "nuke" - - def process(self, instance): - node = instance.data["transientData"]["node"] - - slate = next( - ( - n_ for n_ in nuke.allNodes() - if "slate" in n_.name().lower() - if not n_["disable"].getValue() and - "publish_instance" not in n_.knobs() # Exclude instance nodes. - ), - None - ) - - if slate: - # check if slate node is connected to write node tree - slate_check = 0 - slate_node = None - while slate_check == 0: - try: - node = node.dependencies()[0] - if slate.name() in node.name(): - slate_node = node - slate_check = 1 - except IndexError: - break - - if slate_node: - instance.data["slateNode"] = slate_node - instance.data["slate"] = True - instance.data["families"].append("slate") - self.log.debug( - "Slate node is in node graph: `{}`".format(slate.name())) - self.log.debug( - "__ instance.data: `{}`".format(instance.data)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py deleted file mode 100644 index e4bd5ed129..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import nuke -import pyblish.api - - -class CollectWorkfile(pyblish.api.InstancePlugin): - """Collect current script for publish.""" - - order = pyblish.api.CollectorOrder - label = "Collect Workfile" - hosts = ['nuke'] - families = ["workfile"] - - settings_category = "nuke" - - def process(self, instance): # sourcery skip: avoid-builtin-shadow - - script_data = instance.context.data["scriptData"] - current_file = os.path.normpath(nuke.root().name()) - - # creating instances per write node - staging_dir = os.path.dirname(current_file) - base_name = os.path.basename(current_file) - - # creating representation - representation = { - 'name': 'nk', - 'ext': 'nk', - 'files': base_name, - "stagingDir": staging_dir, - } - - # creating instance data - instance.data.update({ - "name": base_name, - "representations": [representation] - }) - - # adding basic script data - instance.data.update(script_data) - - self.log.debug( - "Collected current script version: {}".format(current_file) - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py deleted file mode 100644 index 816f493d72..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py +++ /dev/null @@ -1,402 +0,0 @@ -import os -import nuke -import pyblish.api -from ayon_nuke import api as napi -from ayon_core.pipeline import publish - - -class CollectNukeWrites(pyblish.api.InstancePlugin, - publish.ColormanagedPyblishPluginMixin): - """Collect all write nodes.""" - - order = pyblish.api.CollectorOrder + 0.0021 - label = "Collect Writes" - hosts = ["nuke", "nukeassist"] - families = ["render", "prerender", "image"] - - settings_category = "nuke" - - # 
cache - _write_nodes = {} - _frame_ranges = {} - - def process(self, instance): - - group_node = instance.data["transientData"]["node"] - render_target = instance.data["render_target"] - - write_node = self._write_node_helper(instance) - - if write_node is None: - self.log.warning( - "Created node '{}' is missing write node!".format( - group_node.name() - ) - ) - return - - # get colorspace and add to version data - colorspace = napi.get_colorspace_from_node(write_node) - - if render_target == "frames": - self._set_existing_files_data(instance, colorspace) - - elif render_target == "frames_farm": - collected_frames = self._set_existing_files_data( - instance, colorspace) - - self._set_expected_files(instance, collected_frames) - - self._add_farm_instance_data(instance) - - elif render_target == "farm": - self._add_farm_instance_data(instance) - - # set additional instance data - self._set_additional_instance_data(instance, render_target, colorspace) - - def _set_existing_files_data(self, instance, colorspace): - """Set existing files data to instance data. - - Args: - instance (pyblish.api.Instance): pyblish instance - colorspace (str): colorspace - - Returns: - list: collected frames - """ - collected_frames = self._get_collected_frames(instance) - - representation = self._get_existing_frames_representation( - instance, collected_frames - ) - - # inject colorspace data - self.set_representation_colorspace( - representation, instance.context, - colorspace=colorspace - ) - - instance.data["representations"].append(representation) - - return collected_frames - - def _set_expected_files(self, instance, collected_frames): - """Set expected files to instance data. - - Args: - instance (pyblish.api.Instance): pyblish instance - collected_frames (list): collected frames - """ - write_node = self._write_node_helper(instance) - - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - instance.data["expectedFiles"] = [ - os.path.join(output_dir, source_file) - for source_file in collected_frames - ] - - def _get_frame_range_data(self, instance): - """Get frame range data from instance. - - Args: - instance (pyblish.api.Instance): pyblish instance - - Returns: - tuple: first_frame, last_frame - """ - - instance_name = instance.data["name"] - - if self._frame_ranges.get(instance_name): - # return cashed write node - return self._frame_ranges[instance_name] - - write_node = self._write_node_helper(instance) - - # Get frame range from workfile - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - - # Get frame range from write node if activated - if write_node["use_limit"].getValue(): - first_frame = int(write_node["first"].getValue()) - last_frame = int(write_node["last"].getValue()) - - # add to cache - self._frame_ranges[instance_name] = (first_frame, last_frame) - - return first_frame, last_frame - - def _set_additional_instance_data( - self, instance, render_target, colorspace - ): - """Set additional instance data. 
- - Args: - instance (pyblish.api.Instance): pyblish instance - render_target (str): render target - colorspace (str): colorspace - """ - product_type = instance.data["productType"] - - # add targeted family to families - instance.data["families"].append( - "{}.{}".format(product_type, render_target) - ) - self.log.debug("Appending render target to families: {}.{}".format( - product_type, render_target) - ) - - write_node = self._write_node_helper(instance) - - # Determine defined file type - ext = write_node["file_type"].value() - - # determine defined channel type - color_channels = write_node["channels"].value() - - # get frame range data - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame, last_frame = self._get_frame_range_data(instance) - - # get output paths - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - # TODO: remove this when we have proper colorspace support - version_data = { - "colorspace": colorspace - } - - instance.data.update({ - "versionData": version_data, - "path": write_file_path, - "outputDir": output_dir, - "ext": ext, - "colorspace": colorspace, - "color_channels": color_channels - }) - - if product_type == "render": - instance.data.update({ - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - else: - instance.data.update({ - "handleStart": 0, - "handleEnd": 0, - "frameStart": first_frame, - "frameEnd": last_frame, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - # TODO temporarily set stagingDir as persistent for backward - # compatibility. This is mainly focused on `renders`folders which - # were previously not cleaned up (and could be used in read notes) - # this logic should be removed and replaced with custom staging dir - instance.data["stagingDir_persistent"] = True - - def _write_node_helper(self, instance): - """Helper function to get write node from instance. - - Also sets instance transient data with child nodes. - - Args: - instance (pyblish.api.Instance): pyblish instance - - Returns: - nuke.Node: write node - """ - instance_name = instance.data["name"] - - if self._write_nodes.get(instance_name): - # return cashed write node - return self._write_nodes[instance_name] - - # get all child nodes from group node - child_nodes = napi.get_instance_group_node_childs(instance) - - # set child nodes to instance transient data - instance.data["transientData"]["childNodes"] = child_nodes - - write_node = None - for node_ in child_nodes: - if node_.Class() == "Write": - write_node = node_ - - if write_node: - # for slate frame extraction - instance.data["transientData"]["writeNode"] = write_node - # add to cache - self._write_nodes[instance_name] = write_node - - return self._write_nodes[instance_name] - - def _get_existing_frames_representation( - self, - instance, - collected_frames - ): - """Get existing frames representation. 
- - Args: - instance (pyblish.api.Instance): pyblish instance - collected_frames (list): collected frames - - Returns: - dict: representation - """ - - first_frame, last_frame = self._get_frame_range_data(instance) - - write_node = self._write_node_helper(instance) - - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - # Determine defined file type - ext = write_node["file_type"].value() - - representation = { - "name": ext, - "ext": ext, - "stagingDir": output_dir, - "tags": [] - } - - # set slate frame - collected_frames = self._add_slate_frame_to_collected_frames( - instance, - collected_frames, - first_frame, - last_frame - ) - - if len(collected_frames) == 1: - representation['files'] = collected_frames.pop() - else: - representation['files'] = collected_frames - - return representation - - def _get_frame_start_str(self, first_frame, last_frame): - """Get frame start string. - - Args: - first_frame (int): first frame - last_frame (int): last frame - - Returns: - str: frame start string - """ - # convert first frame to string with padding - return ( - "{{:0{}d}}".format(len(str(last_frame))) - ).format(first_frame) - - def _add_slate_frame_to_collected_frames( - self, - instance, - collected_frames, - first_frame, - last_frame - ): - """Add slate frame to collected frames. - - Args: - instance (pyblish.api.Instance): pyblish instance - collected_frames (list): collected frames - first_frame (int): first frame - last_frame (int): last frame - - Returns: - list: collected frames - """ - frame_start_str = self._get_frame_start_str(first_frame, last_frame) - frame_length = int(last_frame - first_frame + 1) - - # this will only run if slate frame is not already - # rendered from previews publishes - if ( - "slate" in instance.data["families"] - and frame_length == len(collected_frames) - ): - frame_slate_str = self._get_frame_start_str( - first_frame - 1, - last_frame - ) - - slate_frame = collected_frames[0].replace( - frame_start_str, frame_slate_str) - collected_frames.insert(0, slate_frame) - - return collected_frames - - def _add_farm_instance_data(self, instance): - """Add farm publishing related instance data. - - Args: - instance (pyblish.api.Instance): pyblish instance - """ - - # make sure rendered sequence on farm will - # be used for extract review - if not instance.data.get("review"): - instance.data["useSequenceForReview"] = False - - # Farm rendering - instance.data.update({ - "transfer": False, - "farm": True # to skip integrate - }) - self.log.info("Farm rendering ON ...") - - def _get_collected_frames(self, instance): - """Get collected frames. 
- - Args: - instance (pyblish.api.Instance): pyblish instance - - Returns: - list: collected frames - """ - - first_frame, last_frame = self._get_frame_range_data(instance) - - write_node = self._write_node_helper(instance) - - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - # get file path knob - node_file_knob = write_node["file"] - # list file paths based on input frames - expected_paths = list(sorted({ - node_file_knob.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - - # convert only to base names - expected_filenames = { - os.path.basename(filepath) - for filepath in expected_paths - } - - # make sure files are existing at folder - collected_frames = [ - filename - for filename in os.listdir(output_dir) - if filename in expected_filenames - ] - - return collected_frames diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py deleted file mode 100644 index 8c42920979..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py +++ /dev/null @@ -1,106 +0,0 @@ -import os - -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import ( - maintained_selection, - reset_selection, - select_nodes -) - - -class ExtractBackdropNode(publish.Extractor): - """Extracting content of backdrop nodes - - Will create nuke script only with containing nodes. - Also it will solve Input and Output nodes. - - """ - - order = pyblish.api.ExtractorOrder - label = "Extract Backdrop" - hosts = ["nuke"] - families = ["nukenodes"] - - settings_category = "nuke" - - def process(self, instance): - tmp_nodes = [] - child_nodes = instance.data["transientData"]["childNodes"] - # all connections outside of backdrop - connections_in = instance.data["transientData"]["nodeConnectionsIn"] - connections_out = instance.data["transientData"]["nodeConnectionsOut"] - self.log.debug("_ connections_in: `{}`".format(connections_in)) - self.log.debug("_ connections_out: `{}`".format(connections_out)) - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}.nk".format(instance.name) - path = os.path.join(stagingdir, filename) - - # maintain selection - with maintained_selection(): - # create input child_nodes and name them as passing node (*_INP) - for n, inputs in connections_in.items(): - for i, input in inputs: - inpn = nuke.createNode("Input") - inpn["name"].setValue("{}_{}_INP".format(n.name(), i)) - n.setInput(i, inpn) - inpn.setXYpos(input.xpos(), input.ypos()) - child_nodes.append(inpn) - tmp_nodes.append(inpn) - - reset_selection() - - # connect output node - for n, output in connections_out.items(): - opn = nuke.createNode("Output") - output.setInput( - next((i for i, d in enumerate(output.dependencies()) - if d.name() in n.name()), 0), opn) - opn.setInput(0, n) - opn.autoplace() - child_nodes.append(opn) - tmp_nodes.append(opn) - reset_selection() - - # select child_nodes to copy - reset_selection() - select_nodes(child_nodes) - # create tmp nk file - # save file to the path - nuke.nodeCopy(path) - - # Clean up - for tn in tmp_nodes: - nuke.delete(tn) - - # restore original connections - # reconnect input node - for n, inputs in connections_in.items(): - for i, input in inputs: - n.setInput(i, input) - - # reconnect output node - for n, output in connections_out.items(): - output.setInput( - next((i for i, d in 
enumerate(output.dependencies()) - if d.name() in n.name()), 0), n) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - # create representation - representation = { - 'name': 'nk', - 'ext': 'nk', - 'files': filename, - "stagingDir": stagingdir - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '{}' to: {}".format( - instance.name, path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py deleted file mode 100644 index 83914087e3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py +++ /dev/null @@ -1,185 +0,0 @@ -import os -import math - -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import maintained_selection - - -class ExtractCamera(publish.Extractor): - """ 3D camera extractor - """ - label = 'Extract Camera' - order = pyblish.api.ExtractorOrder - families = ["camera"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - write_geo_knobs = [ - ("file_type", "abc"), - ("storageFormat", "Ogawa"), - ("writeGeometries", False), - ("writePointClouds", False), - ("writeAxes", False) - ] - - def process(self, instance): - camera_node = instance.data["transientData"]["node"] - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - step = 1 - output_range = str(nuke.FrameRange(first_frame, last_frame, step)) - - rm_nodes = [] - self.log.debug("Creating additional nodes for 3D Camera Extractor") - product_name = instance.data["productName"] - staging_dir = self.staging_dir(instance) - - # get extension form preset - extension = next((k[1] for k in self.write_geo_knobs - if k[0] == "file_type"), None) - if not extension: - raise RuntimeError( - "Bad config for extension in presets. 
" - "Talk to your supervisor or pipeline admin") - - # create file name and path - filename = product_name + ".{}".format(extension) - file_path = os.path.join(staging_dir, filename).replace("\\", "/") - - with maintained_selection(): - # bake camera with axeses onto word coordinate XYZ - rm_n = bakeCameraWithAxeses( - camera_node, output_range) - rm_nodes.append(rm_n) - - # create scene node - rm_n = nuke.createNode("Scene") - rm_nodes.append(rm_n) - - # create write geo node - wg_n = nuke.createNode("WriteGeo") - wg_n["file"].setValue(file_path) - # add path to write to - for k, v in self.write_geo_knobs: - wg_n[k].setValue(v) - rm_nodes.append(wg_n) - - # write out camera - nuke.execute( - wg_n, - int(first_frame), - int(last_frame) - ) - # erase additional nodes - for n in rm_nodes: - nuke.delete(n) - - # create representation data - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': extension, - 'ext': extension, - 'files': filename, - "stagingDir": staging_dir, - "frameStart": first_frame, - "frameEnd": last_frame - } - instance.data["representations"].append(representation) - - instance.data.update({ - "path": file_path, - "outputDir": staging_dir, - "ext": extension, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - self.log.debug("Extracted instance '{0}' to: {1}".format( - instance.name, file_path)) - - -def bakeCameraWithAxeses(camera_node, output_range): - """ Baking all perent hierarchy of axeses into camera - with transposition onto word XYZ coordinance - """ - bakeFocal = False - bakeHaperture = False - bakeVaperture = False - - camera_matrix = camera_node['world_matrix'] - - new_cam_n = nuke.createNode("Camera2") - new_cam_n.setInput(0, None) - new_cam_n['rotate'].setAnimated() - new_cam_n['translate'].setAnimated() - - old_focal = camera_node['focal'] - if old_focal.isAnimated() and not (old_focal.animation(0).constant()): - new_cam_n['focal'].setAnimated() - bakeFocal = True - else: - new_cam_n['focal'].setValue(old_focal.value()) - - old_haperture = camera_node['haperture'] - if old_haperture.isAnimated() and not ( - old_haperture.animation(0).constant()): - new_cam_n['haperture'].setAnimated() - bakeHaperture = True - else: - new_cam_n['haperture'].setValue(old_haperture.value()) - - old_vaperture = camera_node['vaperture'] - if old_vaperture.isAnimated() and not ( - old_vaperture.animation(0).constant()): - new_cam_n['vaperture'].setAnimated() - bakeVaperture = True - else: - new_cam_n['vaperture'].setValue(old_vaperture.value()) - - new_cam_n['win_translate'].setValue(camera_node['win_translate'].value()) - new_cam_n['win_scale'].setValue(camera_node['win_scale'].value()) - - for x in nuke.FrameRange(output_range): - math_matrix = nuke.math.Matrix4() - for y in range(camera_matrix.height()): - for z in range(camera_matrix.width()): - matrix_pointer = z + (y * camera_matrix.width()) - math_matrix[matrix_pointer] = camera_matrix.getValueAt( - x, (y + (z * camera_matrix.width()))) - - rot_matrix = nuke.math.Matrix4(math_matrix) - rot_matrix.rotationOnly() - rot = rot_matrix.rotationsZXY() - - new_cam_n['rotate'].setValueAt(math.degrees(rot[0]), x, 0) - new_cam_n['rotate'].setValueAt(math.degrees(rot[1]), x, 1) - new_cam_n['rotate'].setValueAt(math.degrees(rot[2]), x, 2) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 3), x, 
0) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 7), x, 1) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 11), x, 2) - - if bakeFocal: - new_cam_n['focal'].setValueAt(old_focal.getValueAt(x), x) - if bakeHaperture: - new_cam_n['haperture'].setValueAt(old_haperture.getValueAt(x), x) - if bakeVaperture: - new_cam_n['vaperture'].setValueAt(old_vaperture.getValueAt(x), x) - - return new_cam_n diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py deleted file mode 100644 index 05e3164163..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py +++ /dev/null @@ -1,91 +0,0 @@ -import os -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api import utils as pnutils -from ayon_nuke.api.lib import ( - maintained_selection, - reset_selection, - select_nodes -) - - -class ExtractGizmo(publish.Extractor): - """Extracting Gizmo (Group) node - - Will create nuke script only with the Gizmo node. - """ - - order = pyblish.api.ExtractorOrder - label = "Extract Gizmo (group)" - hosts = ["nuke"] - families = ["gizmo"] - - settings_category = "nuke" - - def process(self, instance): - tmp_nodes = [] - orig_grpn = instance.data["transientData"]["node"] - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}.nk".format(instance.name) - path = os.path.join(stagingdir, filename) - - # maintain selection - with maintained_selection(): - orig_grpn_name = orig_grpn.name() - tmp_grpn_name = orig_grpn_name + "_tmp" - # select original group node - select_nodes([orig_grpn]) - - # copy to clipboard - nuke.nodeCopy("%clipboard%") - - # reset selection to none - reset_selection() - - # paste clipboard - nuke.nodePaste("%clipboard%") - - # assign pasted node - copy_grpn = nuke.selectedNode() - copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos()) - - # convert gizmos to groups - pnutils.bake_gizmos_recursively(copy_grpn) - - # add to temporary nodes - tmp_nodes.append(copy_grpn) - - # swap names - orig_grpn.setName(tmp_grpn_name) - copy_grpn.setName(orig_grpn_name) - - # create tmp nk file - # save file to the path - nuke.nodeCopy(path) - - # Clean up - for tn in tmp_nodes: - nuke.delete(tn) - - # rename back to original - orig_grpn.setName(orig_grpn_name) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - # create representation - representation = { - 'name': 'gizmo', - 'ext': 'nk', - 'files': filename, - "stagingDir": stagingdir - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '{}' to: {}".format( - instance.name, path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py deleted file mode 100644 index 4721fe4462..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py +++ /dev/null @@ -1,38 +0,0 @@ -import os -from datetime import datetime -import shutil - -import pyblish.api - -from ayon_core.pipeline import registered_host - - -class ExtractRenderOnFarm(pyblish.api.InstancePlugin): - """Copy the workfile to a timestamped copy.""" - - order = pyblish.api.ExtractorOrder + 0.499 - label = "Extract Render On Farm" - hosts = ["nuke"] - families = ["render_on_farm"] - - settings_category = "nuke" - - def process(self, instance): - 
if not instance.context.data.get("render_on_farm", False): - return - - host = registered_host() - current_datetime = datetime.now() - formatted_timestamp = current_datetime.strftime("%Y%m%d%H%M%S") - base, ext = os.path.splitext(host.current_file()) - - directory = os.path.join(os.path.dirname(base), "farm_submissions") - if not os.path.exists(directory): - os.makedirs(directory) - - filename = "{}_{}{}".format( - os.path.basename(base), formatted_timestamp, ext - ) - path = os.path.join(directory, filename).replace("\\", "/") - instance.context.data["currentFile"] = path - shutil.copy(host.current_file(), path) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py deleted file mode 100644 index 58b9d4179b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -from pprint import pformat -import nuke -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import ( - maintained_selection, - select_nodes -) - - -class ExtractModel(publish.Extractor): - """ 3D model extractor - """ - label = 'Extract Model' - order = pyblish.api.ExtractorOrder - families = ["model"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - write_geo_knobs = [ - ("file_type", "abc"), - ("storageFormat", "Ogawa"), - ("writeGeometries", True), - ("writePointClouds", False), - ("writeAxes", False) - ] - - def process(self, instance): - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - - self.log.debug("instance.data: `{}`".format( - pformat(instance.data))) - - rm_nodes = [] - model_node = instance.data["transientData"]["node"] - - self.log.debug("Creating additional nodes for Extract Model") - product_name = instance.data["productName"] - staging_dir = self.staging_dir(instance) - - extension = next((k[1] for k in self.write_geo_knobs - if k[0] == "file_type"), None) - if not extension: - raise RuntimeError( - "Bad config for extension in presets. 
" - "Talk to your supervisor or pipeline admin") - - # create file name and path - filename = product_name + ".{}".format(extension) - file_path = os.path.join(staging_dir, filename).replace("\\", "/") - - with maintained_selection(): - # select model node - select_nodes([model_node]) - - # create write geo node - wg_n = nuke.createNode("WriteGeo") - wg_n["file"].setValue(file_path) - # add path to write to - for k, v in self.write_geo_knobs: - wg_n[k].setValue(v) - rm_nodes.append(wg_n) - - # write out model - nuke.execute( - wg_n, - int(first_frame), - int(last_frame) - ) - # erase additional nodes - for n in rm_nodes: - nuke.delete(n) - - self.log.debug("Filepath: {}".format(file_path)) - - # create representation data - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': extension, - 'ext': extension, - 'files': filename, - "stagingDir": staging_dir, - "frameStart": first_frame, - "frameEnd": last_frame - } - instance.data["representations"].append(representation) - - instance.data.update({ - "path": file_path, - "outputDir": staging_dir, - "ext": extension, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - self.log.debug("Extracted instance '{0}' to: {1}".format( - instance.name, file_path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py deleted file mode 100644 index 52072cddc5..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py +++ /dev/null @@ -1,45 +0,0 @@ -import nuke -import pyblish.api -from ayon_nuke.api.lib import maintained_selection - - -class CreateOutputNode(pyblish.api.ContextPlugin): - """Adding output node for each output write node - So when latly user will want to Load .nk as LifeGroup or Precomp - Nuke will not complain about missing Output node - """ - label = 'Output Node Create' - order = pyblish.api.ExtractorOrder + 0.4 - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - # capture selection state - with maintained_selection(): - - active_node = [ - inst.data.get("transientData", {}).get("node") - for inst in context - if inst.data.get("transientData", {}).get("node") - if inst.data.get( - "transientData", {}).get("node").Class() != "Root" - ] - - if active_node: - active_node = active_node.pop() - self.log.debug("Active node: {}".format(active_node)) - active_node['selected'].setValue(True) - - # select only instance render node - output_node = nuke.createNode("Output") - - # deselect all and select the original selection - output_node['selected'].setValue(False) - - # save script - nuke.scriptSave() - - # add node to instance node list - context.data["outputNode"] = output_node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py deleted file mode 100644 index 45156ca9ae..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -import pyblish.api - - -class ExtractOutputDirectory(pyblish.api.InstancePlugin): - """Extracts the output path for any collection or single output_path.""" - - order = pyblish.api.ExtractorOrder - 0.05 
- label = "Output Directory" - optional = True - - settings_category = "nuke" - - def process(self, instance): - - path = None - - if "output_path" in instance.data.keys(): - path = instance.data["path"] - - if not path: - return - - if not os.path.exists(os.path.dirname(path)): - os.makedirs(os.path.dirname(path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py deleted file mode 100644 index c865684e7a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py +++ /dev/null @@ -1,218 +0,0 @@ -import os -import shutil - -import pyblish.api -import clique -import nuke -from ayon_nuke import api as napi -from ayon_core.pipeline import publish -from ayon_core.lib import collect_frames - - -class NukeRenderLocal(publish.Extractor, - publish.ColormanagedPyblishPluginMixin): - """Render the current Nuke composition locally. - - Extract the result of savers by starting a comp render - This will run the local render of Fusion. - - Allows to use last published frames and overwrite only specific ones - (set in instance.data.get("frames_to_fix")) - """ - - order = pyblish.api.ExtractorOrder - label = "Render Local" - hosts = ["nuke"] - families = ["render.local", "prerender.local", "image.local"] - - settings_category = "nuke" - - def process(self, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - node = None - for x in child_nodes: - if x.Class() == "Write": - node = x - - self.log.debug("instance collected: {}".format(instance.data)) - - node_product_name = instance.data.get("name", None) - - first_frame = instance.data.get("frameStartHandle", None) - last_frame = instance.data.get("frameEndHandle", None) - - filenames = [] - node_file = node["file"] - # Collect expected filepaths for each frame - # - for cases that output is still image is first created set of - # paths which is then sorted and converted to list - expected_paths = list(sorted({ - node_file.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - # Extract only filenames for representation - filenames.extend([ - os.path.basename(filepath) - for filepath in expected_paths - ]) - - # Ensure output directory exists. 
-        out_dir = os.path.dirname(expected_paths[0])
-        if not os.path.exists(out_dir):
-            os.makedirs(out_dir)
-
-        frames_to_render = [(first_frame, last_frame)]
-
-        frames_to_fix = instance.data.get("frames_to_fix")
-        if instance.data.get("last_version_published_files") and frames_to_fix:
-            frames_to_render = self._get_frames_to_render(frames_to_fix)
-            anatomy = instance.context.data["anatomy"]
-            self._copy_last_published(anatomy, instance, out_dir,
-                                      filenames)
-
-        for render_first_frame, render_last_frame in frames_to_render:
-
-            self.log.info("Starting render")
-            self.log.info("Start frame: {}".format(render_first_frame))
-            self.log.info("End frame: {}".format(render_last_frame))
-
-            # Render frames
-            nuke.execute(
-                str(node_product_name),
-                int(render_first_frame),
-                int(render_last_frame)
-            )
-
-        ext = node["file_type"].value()
-        colorspace = napi.get_colorspace_from_node(node)
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        if len(filenames) == 1:
-            repre = {
-                'name': ext,
-                'ext': ext,
-                'files': filenames[0],
-                "stagingDir": out_dir
-            }
-        else:
-            repre = {
-                'name': ext,
-                'ext': ext,
-                'frameStart': (
-                    "{{:0>{}}}"
-                    .format(len(str(last_frame)))
-                    .format(first_frame)
-                ),
-                'files': filenames,
-                "stagingDir": out_dir
-            }
-
-        # inject colorspace data
-        self.set_representation_colorspace(
-            repre, instance.context,
-            colorspace=colorspace
-        )
-
-        instance.data["representations"].append(repre)
-
-        self.log.debug("Extracted instance '{0}' to: {1}".format(
-            instance.name,
-            out_dir
-        ))
-
-        families = instance.data["families"]
-        anatomy_data = instance.data["anatomyData"]
-        # redefinition of families
-        if "render.local" in families:
-            instance.data["family"] = "render"
-            instance.data["productType"] = "render"
-            families.remove("render.local")
-            families.insert(0, "render2d")
-            anatomy_data["family"] = "render"
-            anatomy_data["product"]["type"] = "render"
-        elif "prerender.local" in families:
-            instance.data["family"] = "prerender"
-            instance.data["productType"] = "prerender"
-            families.remove("prerender.local")
-            families.insert(0, "prerender")
-            anatomy_data["family"] = "prerender"
-            anatomy_data["product"]["type"] = "prerender"
-        elif "image.local" in families:
-            instance.data["family"] = "image"
-            instance.data["productType"] = "image"
-            families.remove("image.local")
-            anatomy_data["family"] = "image"
-            anatomy_data["product"]["type"] = "image"
-        instance.data["families"] = families
-
-        collections, remainder = clique.assemble(filenames)
-        self.log.debug('collections: {}'.format(str(collections)))
-
-        if collections:
-            collection = collections[0]
-            instance.data['collection'] = collection
-
-        self.log.info('Finished render')
-
-        self.log.debug("_ instance.data: {}".format(instance.data))
-
-    def _copy_last_published(self, anatomy, instance, out_dir,
-                             expected_filenames):
-        """Copies last published files to temporary out_dir.
-
-        These are base of files which will be extended/fixed for specific
-        frames.
-        Renames published file to expected file name based on frame, eg.
-        test_project_test_asset_product_v005.1001.exr > new_render.1001.exr
-        """
-        last_published = instance.data["last_version_published_files"]
-        last_published_and_frames = collect_frames(last_published)
-
-        expected_and_frames = collect_frames(expected_filenames)
-        frames_and_expected = {v: k for k, v in expected_and_frames.items()}
-        for file_path, frame in last_published_and_frames.items():
-            file_path = anatomy.fill_root(file_path)
-            if not os.path.exists(file_path):
-                continue
-            target_file_name = frames_and_expected.get(frame)
-            if not target_file_name:
-                continue
-
-            out_path = os.path.join(out_dir, target_file_name)
-            self.log.debug("Copying '{}' -> '{}'".format(file_path, out_path))
-            shutil.copy(file_path, out_path)
-
-            # TODO shouldn't this be uncommented
-            # instance.context.data["cleanupFullPaths"].append(out_path)
-
-    def _get_frames_to_render(self, frames_to_fix):
-        """Return list of frame range tuples to render
-
-        Args:
-            frames_to_fix (str): specific or range of frames to be rerendered
-                (1005, 1009-1010)
-        Returns:
-            (list): [(1005, 1005), (1009-1010)]
-        """
-        frames_to_render = []
-
-        for frame_range in frames_to_fix.split(","):
-            if frame_range.isdigit():
-                render_first_frame = frame_range
-                render_last_frame = frame_range
-            elif '-' in frame_range:
-                frames = frame_range.split('-')
-                render_first_frame = int(frames[0])
-                render_last_frame = int(frames[1])
-            else:
-                raise ValueError("Wrong format of frames to fix {}"
-                                 .format(frames_to_fix))
-            frames_to_render.append((render_first_frame,
-                                     render_last_frame))
-        return frames_to_render
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py
deleted file mode 100644
index 856616898b..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-from pprint import pformat
-import pyblish.api
-
-from ayon_core.pipeline import publish
-
-
-class ExtractReviewData(publish.Extractor):
-    """Extracts review tag into available representation
-    """
-
-    order = pyblish.api.ExtractorOrder + 0.01
-    # order = pyblish.api.CollectorOrder + 0.499
-    label = "Extract Review Data"
-
-    families = ["review"]
-    hosts = ["nuke"]
-
-    settings_category = "nuke"
-
-    def process(self, instance):
-        fpath = instance.data["path"]
-        ext = os.path.splitext(fpath)[-1][1:]
-
-        representations = instance.data.get("representations", [])
-
-        # review can be removed since `ProcessSubmittedJobOnFarm` will create
-        # reviewable representation if needed
-        if (
-            instance.data.get("farm")
-            and "review" in instance.data["families"]
-        ):
-            instance.data["families"].remove("review")
-
-        # iterate representations and add `review` tag
-        for repre in representations:
-            if ext != repre["ext"]:
-                continue
-
-            if not repre.get("tags"):
-                repre["tags"] = []
-
-            if "review" not in repre["tags"]:
-                repre["tags"].append("review")
-
-            self.log.debug("Matching representation: {}".format(
-                pformat(repre)
-            ))
-
-        instance.data["representations"] = representations
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py
deleted file mode 100644
index d3377807ea..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import os
-import pyblish.api
-
-from ayon_core.pipeline import publish
-from ayon_nuke.api import plugin
-from ayon_nuke.api.lib import maintained_selection - - -class ExtractReviewDataLut(publish.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.005 - label = "Extract Review Data Lut" - - families = ["review"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug("Creating staging dir...") - if "representations" in instance.data: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = staging_dir - instance.data["representations"][0]["tags"] = ["review"] - else: - instance.data["representations"] = [] - # get output path - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - # generate data - with maintained_selection(): - exporter = plugin.ExporterReviewLut( - self, instance - ) - data = exporter.generate_lut() - - # assign to representations - instance.data["lutPath"] = os.path.join( - exporter.stagingDir, exporter.file).replace("\\", "/") - instance.data["representations"] += data["representations"] - - # review can be removed since `ProcessSubmittedJobOnFarm` will create - # reviewable representation if needed - if ( - instance.data.get("farm") - and "review" in instance.data["families"] - ): - instance.data["families"].remove("review") - - self.log.debug( - "_ lutPath: {}".format(instance.data["lutPath"])) - self.log.debug( - "_ representations: {}".format(instance.data["representations"])) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py deleted file mode 100644 index 48c9988c5b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py +++ /dev/null @@ -1,161 +0,0 @@ -import os -import re -from pprint import pformat -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api import plugin -from ayon_nuke.api.lib import maintained_selection - - -class ExtractReviewIntermediates(publish.Extractor): - """Extracting intermediate videos or sequences with - thumbnail for transcoding. 
- - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Intermediates" - - families = ["review"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - viewer_lut_raw = None - outputs = {} - - def process(self, instance): - # TODO 'families' should not be included for filtering of outputs - families = set(instance.data["families"]) - - # Add product type to families - families.add(instance.data["productType"]) - - task_type = instance.context.data["taskType"] - product_name = instance.data["productName"] - self.log.debug("Creating staging dir...") - - if "representations" not in instance.data: - instance.data["representations"] = [] - - staging_dir = os.path.normpath( - os.path.dirname(instance.data["path"])) - - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - self.log.debug("Outputs: {}".format(self.outputs)) - - # generate data - with maintained_selection(): - generated_repres = [] - for o_data in self.outputs: - o_name = o_data["name"] - self.log.debug( - "o_name: {}, o_data: {}".format(o_name, pformat(o_data))) - f_product_types = o_data["filter"]["product_types"] - f_task_types = o_data["filter"]["task_types"] - product_names = o_data["filter"]["product_names"] - - self.log.debug( - "f_product_types `{}` > families: {}".format( - f_product_types, families)) - - self.log.debug( - "f_task_types `{}` > task_type: {}".format( - f_task_types, task_type)) - - self.log.debug( - "product_names `{}` > product: {}".format( - product_names, product_name)) - - # test if family found in context - # using intersection to make sure all defined - # families are present in combination - if ( - f_product_types - and not families.intersection(f_product_types) - ): - continue - - # test task types from filter - if f_task_types and task_type not in f_task_types: - continue - - # test products from filter - if product_names and not any( - re.search(p, product_name) for p in product_names - ): - continue - - self.log.debug( - "Baking output `{}` with settings: {}".format( - o_name, o_data) - ) - - # check if settings have more then one preset - # so we dont need to add outputName to representation - # in case there is only one preset - multiple_presets = len(self.outputs) > 1 - - # adding bake presets to instance data for other plugins - if not instance.data.get("bakePresets"): - instance.data["bakePresets"] = {} - # add preset to bakePresets - instance.data["bakePresets"][o_name] = o_data - - # create exporter instance - exporter = plugin.ExporterReviewMov( - self, instance, o_name, o_data["extension"], - multiple_presets) - - delete = not o_data.get("publish", False) - - if instance.data.get("farm"): - if "review" in instance.data["families"]: - instance.data["families"].remove("review") - - data = exporter.generate_mov( - farm=True, delete=delete, **o_data - ) - - self.log.debug( - "_ data: {}".format(data)) - - if not instance.data.get("bakingNukeScripts"): - instance.data["bakingNukeScripts"] = [] - - instance.data["bakingNukeScripts"].append({ - "bakeRenderPath": data.get("bakeRenderPath"), - "bakeScriptPath": data.get("bakeScriptPath"), - "bakeWriteNodeName": data.get("bakeWriteNodeName") - }) - else: - data = exporter.generate_mov(delete=delete, **o_data) - - # add representation generated by exporter - generated_repres.extend(data["representations"]) - self.log.debug( - "__ generated_repres: {}".format(generated_repres)) - - if 
generated_repres: - # assign to representations - instance.data["representations"] += generated_repres - instance.data["useSequenceForReview"] = False - else: - instance.data["families"].remove("review") - self.log.debug( - "Removing `review` from families. " - "Not available baking profile." - ) - self.log.debug(instance.data["families"]) - - self.log.debug( - "_ representations: {}".format( - instance.data["representations"])) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py deleted file mode 100644 index ea584b6529..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py +++ /dev/null @@ -1,16 +0,0 @@ -import nuke -import pyblish.api - - -class ExtractScriptSave(pyblish.api.InstancePlugin): - """Save current Nuke workfile script""" - label = 'Script Save' - order = pyblish.api.ExtractorOrder - 0.1 - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - - self.log.debug('Saving current script') - nuke.scriptSave() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py deleted file mode 100644 index 47750ea637..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py +++ /dev/null @@ -1,366 +0,0 @@ -import os -from pprint import pformat -import nuke -import copy - -import pyblish.api -import six - -from ayon_core.pipeline import publish -from ayon_nuke.api import ( - maintained_selection, - duplicate_node, - get_view_process_node -) - - -class ExtractSlateFrame(publish.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.011 - label = "Extract Slate Frame" - - families = ["slate"] - hosts = ["nuke"] - - settings_category = "nuke" - - # Settings values - key_value_mapping = { - "f_submission_note": { - "enabled": True, "template": "{comment}" - }, - "f_submitting_for": { - "enabled": True, "template": "{intent[value]}" - }, - "f_vfx_scope_of_work": { - "enabled": False, "template": "" - } - } - - def process(self, instance): - - if "representations" not in instance.data: - instance.data["representations"] = [] - - self._create_staging_dir(instance) - - with maintained_selection(): - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) - - if instance.data.get("bakePresets"): - for o_name, o_data in instance.data["bakePresets"].items(): - self.log.debug("_ o_name: {}, o_data: {}".format( - o_name, pformat(o_data))) - self.render_slate( - instance, - o_name, - o_data["bake_viewer_process"], - o_data["bake_viewer_input_process"] - ) - else: - # backward compatibility - self.render_slate(instance) - - # also render image to sequence - self._render_slate_to_sequence(instance) - - def _create_staging_dir(self, instance): - - self.log.debug("Creating staging dir...") - - staging_dir = os.path.normpath( - os.path.dirname(instance.data["path"])) - - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - def _check_frames_exists(self, instance): - # rendering path from group write node - fpath = instance.data["path"] - - # instance frame range with handles - first = instance.data["frameStartHandle"] - last = 
instance.data["frameEndHandle"] - - padding = fpath.count('#') - - test_path_template = fpath - if padding: - repl_string = "#" * padding - test_path_template = fpath.replace( - repl_string, "%0{}d".format(padding)) - - for frame in range(first, last + 1): - test_file = test_path_template % frame - if not os.path.exists(test_file): - self.log.debug("__ test_file: `{}`".format(test_file)) - return None - - return True - - def render_slate( - self, - instance, - output_name=None, - bake_viewer_process=True, - bake_viewer_input_process=True - ): - """Slate frame renderer - - Args: - instance (PyblishInstance): Pyblish instance with product data - output_name (str, optional): - Slate variation name. Defaults to None. - bake_viewer_process (bool, optional): - Switch for viewer profile baking. Defaults to True. - bake_viewer_input_process (bool, optional): - Switch for input process node baking. Defaults to True. - """ - slate_node = instance.data["slateNode"] - - # rendering path from group write node - fpath = instance.data["path"] - - # instance frame range with handles - first_frame = instance.data["frameStartHandle"] - last_frame = instance.data["frameEndHandle"] - - # fill slate node with comments - self.add_comment_slate_node(instance, slate_node) - - # solve output name if any is set - _output_name = output_name or "" - if _output_name: - _output_name = "_" + _output_name - - slate_first_frame = first_frame - 1 - - collection = instance.data.get("collection", None) - - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") - else: - fname = os.path.basename(fpath) - fhead = os.path.splitext(fname)[0] + "." - - if "#" in fhead: - fhead = fhead.replace("#", "")[:-1] - - self.log.debug("__ first_frame: {}".format(first_frame)) - self.log.debug("__ slate_first_frame: {}".format(slate_first_frame)) - - above_slate_node = slate_node.dependencies().pop() - # fallback if files does not exists - if self._check_frames_exists(instance): - # Read node - r_node = nuke.createNode("Read") - r_node["file"].setValue(fpath) - r_node["first"].setValue(first_frame) - r_node["origfirst"].setValue(first_frame) - r_node["last"].setValue(last_frame) - r_node["origlast"].setValue(last_frame) - r_node["colorspace"].setValue(instance.data["colorspace"]) - previous_node = r_node - temporary_nodes = [previous_node] - - # adding copy metadata node for correct frame metadata - cm_node = nuke.createNode("CopyMetaData") - cm_node.setInput(0, previous_node) - cm_node.setInput(1, above_slate_node) - previous_node = cm_node - temporary_nodes.append(cm_node) - - else: - previous_node = above_slate_node - temporary_nodes = [] - - # only create colorspace baking if toggled on - if bake_viewer_process: - if bake_viewer_input_process: - # get input process and connect it to baking - ipn = get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - # add duplicate slate node and connect to previous - duply_slate_node = duplicate_node(slate_node) - duply_slate_node.setInput(0, previous_node) - previous_node = duply_slate_node - temporary_nodes.append(duply_slate_node) - - # add viewer display transformation node - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - else: - # add duplicate slate node and connect to previous - duply_slate_node = duplicate_node(slate_node) - 
duply_slate_node.setInput(0, previous_node) - previous_node = duply_slate_node - temporary_nodes.append(duply_slate_node) - - # create write node - write_node = nuke.createNode("Write") - file = fhead[:-1] + _output_name + "_slate.png" - path = os.path.join( - instance.data["stagingDir"], file).replace("\\", "/") - - # add slate path to `slateFrames` instance data attr - if not instance.data.get("slateFrames"): - instance.data["slateFrames"] = {} - - instance.data["slateFrames"][output_name or "*"] = path - - # create write node - write_node["file"].setValue(path) - write_node["file_type"].setValue("png") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - - # Render frames - nuke.execute( - write_node.name(), int(slate_first_frame), int(slate_first_frame)) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def _render_slate_to_sequence(self, instance): - # set slate frame - first_frame = instance.data["frameStartHandle"] - last_frame = instance.data["frameEndHandle"] - slate_first_frame = first_frame - 1 - - # render slate as sequence frame - nuke.execute( - instance.data["name"], - int(slate_first_frame), - int(slate_first_frame) - ) - - # Add file to representation files - # - get write node - write_node = instance.data["transientData"]["writeNode"] - # - evaluate filepaths for first frame and slate frame - first_filename = os.path.basename( - write_node["file"].evaluate(first_frame)) - slate_filename = os.path.basename( - write_node["file"].evaluate(slate_first_frame)) - - # Find matching representation based on first filename - matching_repre = None - is_sequence = None - for repre in instance.data["representations"]: - files = repre["files"] - if ( - not isinstance(files, six.string_types) - and first_filename in files - ): - matching_repre = repre - is_sequence = True - break - - elif files == first_filename: - matching_repre = repre - is_sequence = False - break - - if not matching_repre: - self.log.info( - "Matching representation was not found." - " Representation files were not filled with slate." 
- ) - return - - # Add frame to matching representation files - if not is_sequence: - matching_repre["files"] = [first_filename, slate_filename] - elif slate_filename not in matching_repre["files"]: - matching_repre["files"].insert(0, slate_filename) - matching_repre["frameStart"] = ( - "{{:0>{}}}" - .format(len(str(last_frame))) - .format(slate_first_frame) - ) - self.log.debug( - "__ matching_repre: {}".format(pformat(matching_repre))) - - data = matching_repre.get("data", {}) - data["slateFrames"] = 1 - matching_repre["data"] = data - - self.log.info("Added slate frame to representation files") - - def add_comment_slate_node(self, instance, node): - - comment = instance.data["comment"] - intent = instance.context.data.get("intent") - if not isinstance(intent, dict): - intent = { - "label": intent, - "value": intent - } - - fill_data = copy.deepcopy(instance.data["anatomyData"]) - fill_data.update({ - "custom": copy.deepcopy( - instance.data.get("customData") or {} - ), - "comment": comment, - "intent": intent - }) - - for key, _values in self.key_value_mapping.items(): - if not _values["enabled"]: - self.log.debug("Key \"{}\" is disabled".format(key)) - continue - - template = _values["template"] - try: - value = template.format(**fill_data) - - except ValueError: - self.log.warning( - "Couldn't fill template \"{}\" with data: {}".format( - template, fill_data - ), - exc_info=True - ) - continue - - except KeyError: - self.log.warning( - ( - "Template contains unknown key." - " Template \"{}\" Data: {}" - ).format(template, fill_data), - exc_info=True - ) - continue - - try: - node[key].setValue(value) - self.log.debug("Change key \"{}\" to value \"{}\"".format( - key, value - )) - except NameError: - self.log.warning(( - "Failed to set value \"{0}\" on node attribute \"{0}\"" - ).format(value)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml deleted file mode 100644 index 1e7d340a13..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml +++ /dev/null @@ -1,31 +0,0 @@ - -- diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml deleted file mode 100644 index ab1b650773..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml +++ /dev/null @@ -1,36 +0,0 @@ - -- -Folder path --## Publishing to a different folder context - -There are publish instances present which are publishing into a different folder than your current context. - -Usually this is not what you want but there can be cases where you might want to publish into another folder/shot or task. - -If that's the case you can disable the validation on the instance to ignore it. - -The wrong node's name is: \`{node_name}\` - -### Correct context keys and values: - -\`{correct_values}\` - -### Wrong keys and values: - -\`{wrong_values}\`. - - -## How to repair? - -1. Use \"Repair\" button. -2. Hit Reload button on the publisher. 
- -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml deleted file mode 100644 index f39a41a4f9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml +++ /dev/null @@ -1,36 +0,0 @@ - -- -Found multiple outputs --## Invalid output amount - -Backdrop is having more than one outgoing connections. - -### How to repair? - -1. Use button `Center node in node graph` and navigate to the backdrop. -2. Reorganize nodes the way only one outgoing connection is present. -3. Hit reload button on the publisher. - --### How could this happen? - -More than one node, which are found above the backdrop, are linked downstream or more output connections from a node also linked downstream. - -- -Empty backdrop --## Invalid empty backdrop - -Backdrop is empty and no nodes are found above it. - -### How to repair? - -1. Use button `Center node in node graph` and navigate to the backdrop. -2. Add any node above it or delete it. -3. Hit reload button on the publisher. - -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml deleted file mode 100644 index 76c184f653..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml +++ /dev/null @@ -1,18 +0,0 @@ - -- -Found multiple outputs --## Invalid amount of Output nodes - -Group node `{node_name}` is having more than one Output node. - -### How to repair? - -1. Use button `Open Group`. -2. Remove redundant Output node. -3. Hit reload button on the publisher. - --### How could this happen? - -Perhaps you had created exciently more than one Output node. - -- -Missing Input nodes --## Missing Input nodes - -Make sure there is at least one connected Input node inside the group node with name `{node_name}` - -### How to repair? - -1. Use button `Open Group`. -2. Add at least one Input node and connect to other nodes. -3. Hit reload button on the publisher. - -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml deleted file mode 100644 index 08a88a993e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml +++ /dev/null @@ -1,16 +0,0 @@ - -- -Knobs value --## Invalid node's knobs values - -Following node knobs needs to be repaired: - -{invalid_items} - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher. - -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml deleted file mode 100644 index 6fe5d5d43e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml +++ /dev/null @@ -1,16 +0,0 @@ - -- -Output format --## Invalid format setting - -Either the Reformat node inside of the render group is missing or the Reformat node output format knob is not set to `root.format`. - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher. 
- -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml deleted file mode 100644 index 434081c269..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml +++ /dev/null @@ -1,17 +0,0 @@ - -- -Proxy mode --## Invalid proxy mode value - -Nuke is set to use Proxy. This is not supported by publisher. - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher. - -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml deleted file mode 100644 index 871fc629ce..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml +++ /dev/null @@ -1,18 +0,0 @@ - -- -Rendered Frames --## Missing Rendered Frames - -Render node "{node_name}" is set to "Use existing frames", but frames are missing. - -### How to repair? - -1. Use Repair button. -2. Set different target. -2. Hit Reload button on the publisher. - -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml deleted file mode 100644 index 96aa6e4494..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml +++ /dev/null @@ -1,32 +0,0 @@ - -- -Script attributes --## Invalid Script attributes - -Following script root attributes need to be fixed: - -{failed_attributes} - -### How to repair? - -1. Use Repair. -2. Hit Reload button on the publisher. 
- -- \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py deleted file mode 100644 index 36659aa2d2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py +++ /dev/null @@ -1,26 +0,0 @@ -import nuke -import pyblish.api - - -class IncrementScriptVersion(pyblish.api.ContextPlugin): - """Increment current script version.""" - - order = pyblish.api.IntegratorOrder + 0.9 - label = "Increment Script Version" - optional = True - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - if not context.data.get("increment_script_version", True): - return - - assert all(result["success"] for result in context.data["results"]), ( - "Publishing not successful so version is not increased.") - - from ayon_core.lib import version_up - path = context.data["currentFile"] - nuke.scriptSaveAs(version_up(path)) - self.log.info('Incrementing script version') diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py deleted file mode 100644 index 4c17cb5f56..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py +++ /dev/null @@ -1,24 +0,0 @@ -import nuke -import pyblish.api - - -class RemoveOutputNode(pyblish.api.ContextPlugin): - """Removing output node for each output write node - - """ - label = 'Output Node Remove' - order = pyblish.api.IntegratorOrder + 0.4 - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - try: - output_node = context.data["outputNode"] - name = output_node["name"].value() - self.log.info("Removing output node: '{}'".format(name)) - - nuke.delete(output_node) - except Exception: - return diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py deleted file mode 100644 index 903648fd1b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -"""Validate if instance folder is the same as context folder.""" -from __future__ import absolute_import - -import pyblish.api - -from ayon_core.pipeline.publish import ( - RepairAction, - ValidateContentsOrder, - PublishXmlValidationError, - OptionalPyblishPluginMixin -) -from ayon_nuke.api import SelectInstanceNodeAction - - -class ValidateCorrectAssetContext( - pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin -): - """Validator to check if instance folder context match context folder. - - When working in per-shot style you always publish data in context of - current folder (shot). This validator checks if this is so. It is optional - so it can be disabled when needed. - - Checking `folderPath` and `task` keys. - """ - order = ValidateContentsOrder - label = "Validate Folder context" - hosts = ["nuke"] - actions = [ - RepairAction, - SelectInstanceNodeAction - ] - optional = True - - settings_category = "nuke" - - @classmethod - def apply_settings(cls, project_settings): - """Apply deprecated settings from project settings. 
- """ - nuke_publish = project_settings["nuke"]["publish"] - if "ValidateCorrectAssetName" in nuke_publish: - settings = nuke_publish["ValidateCorrectAssetName"] - else: - settings = nuke_publish["ValidateCorrectAssetContext"] - - cls.enabled = settings["enabled"] - cls.optional = settings["optional"] - cls.active = settings["active"] - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid_keys = self.get_invalid(instance) - - if not invalid_keys: - return - - message_values = { - "node_name": instance.data["transientData"]["node"].name(), - "correct_values": ", ".join([ - "{} > {}".format(_key, instance.context.data[_key]) - for _key in invalid_keys - ]), - "wrong_values": ", ".join([ - "{} > {}".format(_key, instance.data.get(_key)) - for _key in invalid_keys - ]) - } - - msg = ( - "Instance `{node_name}` has wrong context keys:\n" - "Correct: `{correct_values}` | Wrong: `{wrong_values}`").format( - **message_values) - - self.log.debug(msg) - - raise PublishXmlValidationError( - self, msg, formatting_data=message_values - ) - - @classmethod - def get_invalid(cls, instance): - """Get invalid keys from instance data and context data.""" - - invalid_keys = [] - testing_keys = ["folderPath", "task"] - for _key in testing_keys: - if _key not in instance.data: - invalid_keys.append(_key) - continue - if instance.data[_key] != instance.context.data[_key]: - invalid_keys.append(_key) - - return invalid_keys - - @classmethod - def repair(cls, instance): - """Repair instance data with context data.""" - invalid_keys = cls.get_invalid(instance) - - create_context = instance.context.data["create_context"] - - instance_id = instance.data.get("instance_id") - created_instance = create_context.get_instance_by_id( - instance_id - ) - for _key in invalid_keys: - created_instance[_key] = instance.context.data[_key] - - create_context.save_changes() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py deleted file mode 100644 index f7b94e0c82..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py +++ /dev/null @@ -1,101 +0,0 @@ -import nuke -import pyblish -from ayon_nuke import api as napi - -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - -class SelectCenterInNodeGraph(pyblish.api.Action): - """ - Centering failed instance node in node grap - """ - - label = "Center node in node graph" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - all_xC = [] - all_yC = [] - - # maintain selection - with napi.maintained_selection(): - # collect all failed nodes xpos and ypos - for instance in instances: - bdn = instance.data["transientData"]["node"] - xC = bdn.xpos() + bdn.screenWidth() / 2 - yC = bdn.ypos() + bdn.screenHeight() / 2 - - all_xC.append(xC) - all_yC.append(yC) - - self.log.debug("all_xC: `{}`".format(all_xC)) - self.log.debug("all_yC: `{}`".format(all_yC)) - - # zoom to nodes in node graph - nuke.zoom(2, [min(all_xC), min(all_yC)]) - - -class ValidateBackdrop( - 
pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin -): - """ Validate amount of nodes on backdrop node in case user - forgotten to add nodes above the publishing backdrop node. - """ - - order = ValidateContentsOrder - optional = True - families = ["nukenodes"] - label = "Validate Backdrop" - hosts = ["nuke"] - actions = [SelectCenterInNodeGraph] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - child_nodes = instance.data["transientData"]["childNodes"] - connections_out = instance.data["transientData"]["nodeConnectionsOut"] - - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - - if len(connections_out.keys()) > 1: - raise PublishXmlValidationError( - self, - msg_multiple_outputs, - "multiple_outputs" - ) - - msg_no_nodes = "No content on backdrop node: \"{}\"".format( - instance.data["name"]) - - self.log.debug( - "Amount of nodes on instance: {}".format( - len(child_nodes)) - ) - - if child_nodes == []: - raise PublishXmlValidationError( - self, - msg_no_nodes, - "no_nodes" - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py deleted file mode 100644 index d1b7c146fb..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py +++ /dev/null @@ -1,82 +0,0 @@ -import pyblish.api - -from ayon_core.pipeline.publish import get_errored_instances_from_context -from ayon_nuke.api.lib import link_knobs -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, - PublishValidationError -) - - -class RepairExposedKnobs(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - instances = get_errored_instances_from_context(context) - - for instance in instances: - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - product_type = instance.data["productType"] - plugin_name = plugin.product_types_mapping[product_type] - nuke_settings = instance.context.data["project_settings"]["nuke"] - create_settings = nuke_settings["create"][plugin_name] - exposed_knobs = create_settings["exposed_knobs"] - link_knobs(exposed_knobs, write_node, write_group_node) - - -class ValidateExposedKnobs( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validate write node exposed knobs. - - Compare exposed linked knobs to settings. 
- """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render", "prerender", "image"] - label = "Validate Exposed Knobs" - actions = [RepairExposedKnobs] - hosts = ["nuke"] - - settings_category = "nuke" - - product_types_mapping = { - "render": "CreateWriteRender", - "prerender": "CreateWritePrerender", - "image": "CreateWriteImage" - } - - def process(self, instance): - if not self.is_active(instance.data): - return - - product_type = instance.data["productType"] - plugin = self.product_types_mapping[product_type] - group_node = instance.data["transientData"]["node"] - nuke_settings = instance.context.data["project_settings"]["nuke"] - create_settings = nuke_settings["create"][plugin] - exposed_knobs = create_settings.get("exposed_knobs", []) - unexposed_knobs = [] - for knob in exposed_knobs: - if knob not in group_node.knobs(): - unexposed_knobs.append(knob) - - if unexposed_knobs: - raise PublishValidationError( - "Missing exposed knobs: {}".format(unexposed_knobs) - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py deleted file mode 100644 index 55249ae931..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py +++ /dev/null @@ -1,72 +0,0 @@ -import pyblish -from ayon_core.pipeline import PublishXmlValidationError -from ayon_nuke import api as napi -import nuke - - -class OpenFailedGroupNode(pyblish.api.Action): - """ - Centering failed instance node in node grap - """ - - label = "Open Group" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - # maintain selection - with napi.maintained_selection(): - # collect all failed nodes xpos and ypos - for instance in instances: - grpn = instance.data["transientData"]["node"] - nuke.showDag(grpn) - - -class ValidateGizmo(pyblish.api.InstancePlugin): - """Validate amount of output nodes in gizmo (group) node""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["gizmo"] - label = "Validate Gizmo (group)" - hosts = ["nuke"] - actions = [OpenFailedGroupNode] - - settings_category = "nuke" - - def process(self, instance): - grpn = instance.data["transientData"]["node"] - - with grpn: - connections_out = nuke.allNodes('Output') - if len(connections_out) > 1: - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - - raise PublishXmlValidationError( - self, msg_multiple_outputs, "multiple_outputs", - {"node_name": grpn["name"].value()} - ) - - connections_in = nuke.allNodes('Input') - if len(connections_in) == 0: - msg_missing_inputs = ( - "At least one Input node has to be inside Group: " - "\"{}\"").format(instance.data["name"]) - - raise PublishXmlValidationError( - self, msg_missing_inputs, "no_inputs", - {"node_name": grpn["name"].value()} - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py deleted file mode 100644 index ea03bd94b2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py +++ 
/dev/null @@ -1,133 +0,0 @@ -import json - -import nuke -import six -import pyblish.api - -from ayon_core.pipeline.publish import ( - RepairContextAction, - PublishXmlValidationError, -) - - -class ValidateKnobs(pyblish.api.ContextPlugin): - """Ensure knobs are consistent. - - Knobs to validate and their values comes from the - - Controlled by plugin settings that require json in following structure: - "ValidateKnobs": { - "enabled": true, - "knobs": { - "family": { - "knob_name": knob_value - } - } - } - """ - - order = pyblish.api.ValidatorOrder - label = "Validate Knobs" - hosts = ["nuke"] - actions = [RepairContextAction] - optional = True - - settings_category = "nuke" - - knobs = "{}" - - def process(self, context): - invalid = self.get_invalid(context, compute=True) - if invalid: - invalid_items = [ - ( - "Node __{node_name}__ with knob _{label}_ " - "expecting _{expected}_, " - "but is set to _{current}_" - ).format(**i) - for i in invalid - ] - raise PublishXmlValidationError( - self, - "Found knobs with invalid values:\n{}".format(invalid), - formatting_data={ - "invalid_items": "\n".join(invalid_items)} - ) - - @classmethod - def get_invalid(cls, context, compute=False): - invalid = context.data.get("invalid_knobs", []) - if compute: - invalid = cls.get_invalid_knobs(context) - - return invalid - - @classmethod - def get_invalid_knobs(cls, context): - invalid_knobs = [] - - for instance in context: - # Load fresh knobs data for each instance - settings_knobs = json.loads(cls.knobs) - - # Filter families. - families = [instance.data["productType"]] - families += instance.data.get("families", []) - - # Get all knobs to validate. - knobs = {} - for family in families: - # check if dot in family - if "." in family: - family = family.split(".")[0] - - # avoid families not in settings - if family not in settings_knobs: - continue - - # get presets of knobs - for preset in settings_knobs[family]: - knobs[preset] = settings_knobs[family][preset] - - # Get invalid knobs. 
- nodes = [] - - for node in nuke.allNodes(): - nodes.append(node) - if node.Class() == "Group": - node.begin() - nodes.extend(iter(nuke.allNodes())) - node.end() - - for node in nodes: - for knob in node.knobs(): - if knob not in knobs.keys(): - continue - - expected = knobs[knob] - if node[knob].value() != expected: - invalid_knobs.append( - { - "node_name": node.name(), - "knob": node[knob], - "name": node[knob].name(), - "label": node[knob].label(), - "expected": expected, - "current": node[knob].value() - } - ) - - context.data["invalid_knobs"] = invalid_knobs - return invalid_knobs - - @classmethod - def repair(cls, instance): - invalid = cls.get_invalid(instance) - for data in invalid: - # TODO: will need to improve type definitions - # with the new settings for knob types - if isinstance(data["expected"], six.text_type): - data["knob"].setValue(str(data["expected"])) - continue - - data["knob"].setValue(data["expected"]) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py deleted file mode 100644 index 440cb8b758..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py +++ /dev/null @@ -1,114 +0,0 @@ -import pyblish.api - -from ayon_nuke import api as napi -from ayon_core.pipeline.publish import RepairAction -from ayon_core.pipeline import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - -import nuke - - -class ValidateOutputResolution( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """Validates Output Resolution. - - It is making sure the resolution of write's input is the same as - Format definition of script in Root node. - """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render"] - label = "Validate Write resolution" - hosts = ["nuke"] - actions = [RepairAction] - - settings_category = "nuke" - - missing_msg = "Missing Reformat node in render group node" - resolution_msg = "Reformat is set to wrong format" - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - raise PublishXmlValidationError(self, invalid) - - @classmethod - def get_reformat(cls, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - reformat = None - for inode in child_nodes: - if inode.Class() != "Reformat": - continue - reformat = inode - - return reformat - - @classmethod - def get_invalid(cls, instance): - def _check_resolution(instance, reformat): - root_width = instance.data["resolutionWidth"] - root_height = instance.data["resolutionHeight"] - - write_width = reformat.format().width() - write_height = reformat.format().height() - - if (root_width != write_width) or (root_height != write_height): - return None - else: - return True - - # check if reformat is in render node - reformat = cls.get_reformat(instance) - if not reformat: - return cls.missing_msg - - # check if reformat is set to correct root format - correct_format = _check_resolution(instance, reformat) - if not correct_format: - return cls.resolution_msg - - @classmethod - def repair(cls, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - invalid = cls.get_invalid(instance) - grp_node = instance.data["transientData"]["node"] - - if cls.missing_msg == invalid: - # make sure we are inside of the group node - with grp_node: - # 
find input node and select it - _input = None - for inode in child_nodes: - if inode.Class() != "Input": - continue - _input = inode - - # add reformat node under it - with napi.maintained_selection(): - _input['selected'].setValue(True) - _rfn = nuke.createNode("Reformat", "name Reformat01") - _rfn["resize"].setValue(0) - _rfn["black_outside"].setValue(1) - - cls.log.info("Adding reformat node") - - if cls.resolution_msg == invalid: - reformat = cls.get_reformat(instance) - reformat["format"].setValue(nuke.root()["format"].value()) - cls.log.info("Fixing reformat to root.format") diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py deleted file mode 100644 index 1eb858b17e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py +++ /dev/null @@ -1,38 +0,0 @@ -import pyblish -import nuke -from ayon_core.pipeline import PublishXmlValidationError - - -class FixProxyMode(pyblish.api.Action): - """ - Togger off proxy switch OFF - """ - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - rootNode = nuke.root() - rootNode["proxy"].setValue(False) - - -class ValidateProxyMode(pyblish.api.ContextPlugin): - """Validate active proxy mode""" - - order = pyblish.api.ValidatorOrder - label = "Validate Proxy Mode" - hosts = ["nuke"] - actions = [FixProxyMode] - - settings_category = "nuke" - - def process(self, context): - - rootNode = nuke.root() - isProxy = rootNode["proxy"].value() - - if isProxy: - raise PublishXmlValidationError( - self, "Proxy mode should be toggled OFF" - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py deleted file mode 100644 index 20b7f6a6ac..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py +++ /dev/null @@ -1,139 +0,0 @@ -import pyblish.api -import clique - -from ayon_core.pipeline import PublishXmlValidationError -from ayon_core.pipeline.publish import get_errored_instances_from_context - - -class RepairActionBase(pyblish.api.Action): - on = "failed" - icon = "wrench" - - @staticmethod - def get_instance(context, plugin): - # Get the errored instances - return get_errored_instances_from_context(context, plugin=plugin) - - def repair_knob(self, context, instances, state): - create_context = context.data["create_context"] - for instance in instances: - # Reset the render knob - instance_id = instance.data.get("instance_id") - created_instance = create_context.get_instance_by_id( - instance_id - ) - created_instance.creator_attributes["render_target"] = state - self.log.info("Rendering toggled to `{}`".format(state)) - - create_context.save_changes() - - -class RepairCollectionActionToLocal(RepairActionBase): - label = "Repair - rerender with \"Local\"" - - def process(self, context, plugin): - instances = self.get_instance(context, plugin) - self.repair_knob(context, instances, "local") - - -class RepairCollectionActionToFarm(RepairActionBase): - label = "Repair - rerender with \"On farm\"" - - def process(self, context, plugin): - instances = self.get_instance(context, plugin) - self.repair_knob(context, instances, "farm") - - -class ValidateRenderedFrames(pyblish.api.InstancePlugin): - """ Validates file output. 
""" - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["render", "prerender", "still"] - - label = "Validate rendered frame" - hosts = ["nuke", "nukestudio"] - actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] - - settings_category = "nuke" - - def process(self, instance): - node = instance.data["transientData"]["node"] - - f_data = { - "node_name": node.name() - } - - for repre in instance.data["representations"]: - - if not repre.get("files"): - msg = ("no frames were collected, " - "you need to render them.\n" - "Check properties of write node (group) and" - "select 'Local' option in 'Publish' dropdown.") - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - if isinstance(repre["files"], str): - return - - collections, remainder = clique.assemble(repre["files"]) - self.log.debug("collections: {}".format(str(collections))) - self.log.debug("remainder: {}".format(str(remainder))) - - collection = collections[0] - - f_start_h = instance.data["frameStartHandle"] - f_end_h = instance.data["frameEndHandle"] - - frame_length = int(f_end_h - f_start_h + 1) - - if frame_length != 1: - if len(collections) != 1: - msg = "There are multiple collections in the folder" - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - if not collection.is_contiguous(): - msg = "Some frames appear to be missing" - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - collected_frames_len = len(collection.indexes) - coll_start = min(collection.indexes) - coll_end = max(collection.indexes) - - self.log.debug("frame_length: {}".format(frame_length)) - self.log.debug("collected_frames_len: {}".format( - collected_frames_len)) - self.log.debug("f_start_h-f_end_h: {}-{}".format( - f_start_h, f_end_h)) - self.log.debug( - "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) - - self.log.debug( - "len(collection.indexes): {}".format(collected_frames_len) - ) - - if ("slate" in instance.data["families"]) \ - and (frame_length != collected_frames_len): - collected_frames_len -= 1 - f_start_h += 1 - - if ( - collected_frames_len != frame_length - and coll_start <= f_start_h - and coll_end >= f_end_h - ): - raise PublishXmlValidationError( - self, ( - "{} missing frames. Use repair to " - "render all frames" - ).format(__name__), formatting_data=f_data - ) - - instance.data["collection"] = collection - - return diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py deleted file mode 100644 index 617d8d835b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py +++ /dev/null @@ -1,103 +0,0 @@ -from copy import deepcopy -import pyblish.api -from ayon_core.pipeline import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) -from ayon_core.pipeline.publish import RepairAction -from ayon_nuke.api.lib import ( - WorkfileSettings -) - - -class ValidateScriptAttributes( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validates file output. 
""" - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["workfile"] - label = "Validate script attributes" - hosts = ["nuke"] - optional = True - actions = [RepairAction] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - script_data = deepcopy(instance.context.data["scriptData"]) - - src_folder_attributes = instance.data["folderEntity"]["attrib"] - - # These attributes will be checked - attributes = [ - "fps", - "frameStart", - "frameEnd", - "resolutionWidth", - "resolutionHeight", - "handleStart", - "handleEnd" - ] - - # get only defined attributes from folder data - folder_attributes = { - attr: src_folder_attributes[attr] - for attr in attributes - if attr in src_folder_attributes - } - # fix frame values to include handles - folder_attributes["fps"] = float("{0:.4f}".format( - folder_attributes["fps"])) - script_data["fps"] = float("{0:.4f}".format( - script_data["fps"])) - - # Compare folder's values Nukescript X Database - not_matching = [] - for attr in attributes: - self.log.debug( - "Folder vs Script attribute \"{}\": {}, {}".format( - attr, - folder_attributes[attr], - script_data[attr] - ) - ) - if folder_attributes[attr] != script_data[attr]: - not_matching.append({ - "name": attr, - "expected": folder_attributes[attr], - "actual": script_data[attr] - }) - - # Raise error if not matching - if not_matching: - msg = "Following attributes are not set correctly: \n{}" - attrs_wrong_str = "\n".join([ - ( - "`{0}` is set to `{1}`, " - "but should be set to `{2}`" - ).format(at["name"], at["actual"], at["expected"]) - for at in not_matching - ]) - attrs_wrong_html = "- -Knobs values -- ## Invalid node's knobs values - - Following write node knobs needs to be repaired: - - {xml_msg} - - ### How to repair? - - 1. Use Repair button. - 2. Hit Reload button on the publisher. - -- -Legacy knob types -- ## Knobs are in obsolete configuration - - Settings needs to be fixed. - - ### How to repair? - - Contact your supervisor or fix it in project settings at - 'project_settings/nuke/imageio/nodes/required_nodes' at knobs. - Each '__legacy__' type has to be defined accordingly to its type. - -
".join([ - ( - "-- __{0}__ is set to __{1}__, " - "but should be set to __{2}__" - ).format(at["name"], at["actual"], at["expected"]) - for at in not_matching - ]) - raise PublishXmlValidationError( - self, msg.format(attrs_wrong_str), - formatting_data={ - "failed_attributes": attrs_wrong_html - } - ) - - @classmethod - def repair(cls, instance): - cls.log.debug("__ repairing instance: {}".format(instance)) - WorkfileSettings().set_context_settings() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py deleted file mode 100644 index d642a4314c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py +++ /dev/null @@ -1,156 +0,0 @@ -from collections import defaultdict - -import pyblish.api -from ayon_core.pipeline.publish import get_errored_instances_from_context -from ayon_nuke.api.lib import ( - get_write_node_template_attr, - set_node_knobs_from_settings, - color_gui_to_int -) - -from ayon_core.pipeline.publish import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - - -class RepairNukeWriteNodeAction(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - instances = get_errored_instances_from_context(context) - - for instance in instances: - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - correct_data = get_write_node_template_attr(write_group_node) - - set_node_knobs_from_settings(write_node, correct_data["knobs"]) - - self.log.debug("Node attributes were fixed") - - -class ValidateNukeWriteNode( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validate Write node's knobs. - - Compare knobs on write node inside the render group - with settings. At the moment supporting only `file` knob. - """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render"] - label = "Validate write node" - actions = [RepairNukeWriteNodeAction] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - if write_node is None: - return - - correct_data = get_write_node_template_attr(write_group_node) - - check = [] - - # Collect key values of same type in a list. 
- values_by_name = defaultdict(list) - for knob_data in correct_data["knobs"]: - knob_type = knob_data["type"] - knob_value = knob_data[knob_type] - - values_by_name[knob_data["name"]].append(knob_value) - - for knob_data in correct_data["knobs"]: - knob_type = knob_data["type"] - - if ( - knob_type == "__legacy__" - ): - raise PublishXmlValidationError( - self, ( - "Please update data in settings 'project_settings" - "/nuke/imageio/nodes/required_nodes'" - ), - key="legacy" - ) - - key = knob_data["name"] - values = values_by_name[key] - node_value = write_node[key].value() - - # fix type differences - fixed_values = [] - for value in values: - if type(node_value) in (int, float): - try: - if isinstance(value, list): - value = color_gui_to_int(value) - else: - value = float(value) - node_value = float(node_value) - except ValueError: - value = str(value) - else: - value = str(value) - node_value = str(node_value) - - fixed_values.append(value) - - if ( - node_value not in fixed_values - and key != "file" - and key != "tile_color" - ): - check.append([key, fixed_values, write_node[key].value()]) - - if check: - self._make_error(check) - - def _make_error(self, check): - # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block - dbg_msg = "Write node's knobs values are not correct!\n" - msg_add = "Knob '{0}' > Expected: `{1}` > Current: `{2}`" - - details = [ - msg_add.format(item[0], item[1], item[2]) - for item in check - ] - xml_msg = "
".join(details) - dbg_msg += "\n\t".join(details) - - raise PublishXmlValidationError( - self, dbg_msg, formatting_data={"xml_msg": xml_msg} - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py b/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py deleted file mode 100644 index 4d43d59bad..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py +++ /dev/null @@ -1,428 +0,0 @@ -import nuke - -from ayon_core.pipeline.workfile.workfile_template_builder import ( - CreatePlaceholderItem, - PlaceholderCreateMixin, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, - imprint, - refresh_node, - refresh_nodes, - reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, -) -from ayon_nuke.api.workfile_template_builder import ( - NukePlaceholderPlugin -) - - -class NukePlaceholderCreatePlugin( - NukePlaceholderPlugin, PlaceholderCreateMixin -): - identifier = "nuke.create" - label = "Nuke create" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderCreatePlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _before_instance_create(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - - output.append( - CreatePlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def get_placeholder_options(self, options=None): - return self.get_create_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. 
- """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - nodes_init = placeholder.data["nodes_init"] - nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Created nodes: {}".format(nodes_created)) - if not nodes_created: - return - - placeholder.data["delete"] = True - - nodes_created = self._move_to_placeholder_group( - placeholder, nodes_created - ) - placeholder.data["last_created"] = nodes_created - refresh_nodes(nodes_created) - - # positioning of the created nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_created) - for node in nodes_created: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_created) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of created nodes - - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_created) - - self._set_created_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new created nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_created) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_created, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the created - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_created, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in nodes_created: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_created): - """ - opening the placeholder's group and copying created nodes in it. 
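To make the positioning step a few lines up concrete: every newly created node is shifted so that the top-left corner of the created block lands on the placeholder node's position, i.e. xpos = (node_x - min_x) + placeholder_x and likewise for y. A tiny sketch with made-up coordinates standing in for node.xpos()/node.ypos():

# Invented positions; in Nuke they come from node.xpos() / node.ypos().
created_positions = [(310, 55), (340, 120), (295, 80)]
placeholder_pos = (100, 200)

min_x = min(x for x, _ in created_positions)
min_y = min(y for _, y in created_positions)

# Offset each node so the created block's origin sits on the placeholder.
moved = [
    ((x - min_x) + placeholder_pos[0], (y - min_y) + placeholder_pos[1])
    for x, y in created_positions
]
print(moved)  # [(115, 200), (145, 265), (100, 225)]
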
- - Returns : - nodes_created (list): the new list of pasted nodes - """ - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_created) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_created = nuke.selectedNodes() - return nodes_created - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is create.""" - - nodes_created = placeholder.data["last_created"] - created_backdrops = [] - bd_orders = set() - for node in nodes_created: - if isinstance(node, nuke.BackdropNode): - created_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in created_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes created with it) - - add Id to the attributes of all the other nodes - """ - - created_nodes = placeholder.data["last_created"] - created_nodes_set = set(created_nodes) - - for node in created_nodes: - node_knobs = node.knobs() - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(created_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
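For the _fix_z_order shift above: each created backdrop's z_order is raised by (max sibling order - min created order + 1), which pushes the whole created block past the highest sibling backdrop. A small arithmetic sketch with invented order values:

# Invented z_order values; in Nuke they come from node.knob("z_order").getValue().
created_backdrop_orders = [0, 1]
sibling_backdrop_orders = [2, 5]

min_created = min(created_backdrop_orders)
max_sibling = max(sibling_backdrop_orders)

# Raise every created backdrop so it stacks above the existing siblings.
shifted = [z + max_sibling - min_created + 1 for z in created_backdrop_orders]
print(shifted)  # [6, 7]
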
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_created_connections(self, placeholder): - """ - set inputs and outputs of created nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_created"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - created with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_created"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py b/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py deleted file mode 100644 index 68bc10e41b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py +++ /dev/null @@ -1,455 +0,0 @@ -import nuke - -from ayon_core.pipeline.workfile.workfile_template_builder import ( - LoadPlaceholderItem, - PlaceholderLoadMixin, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, - imprint, - refresh_node, - refresh_nodes, - reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, -) -from ayon_nuke.api.workfile_template_builder import ( - NukePlaceholderPlugin -) - - -class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): - identifier = "nuke.load" - label = "Nuke load" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderLoadPlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _get_loaded_repre_ids(self): - loaded_representation_ids = self.builder.get_shared_populate_data( - "loaded_representation_ids" - ) - if loaded_representation_ids is None: - loaded_representation_ids = set() - for node in nuke.allNodes(): - if "repre_id" in node.knobs(): - loaded_representation_ids.add( - node.knob("repre_id").getValue() - ) - - self.builder.set_shared_populate_data( - "loaded_representation_ids", loaded_representation_ids - ) - return loaded_representation_ids - - def _before_placeholder_load(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def _before_repre_load(self, placeholder, representation): - placeholder.data["last_repre_id"] = representation["id"] - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - 
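A note on the _get_loaded_repre_ids helper above: it is a lazily built cache shared through the template builder, so the scene is scanned for repre_id knobs only once per populate run. A generic sketch of that caching pattern; the builder class and the plain dictionaries standing in for Nuke nodes are invented for illustration.

class HypotheticalBuilder:
    """Stands in for the template builder's shared populate data store."""

    def __init__(self):
        self._shared = {}

    def get_shared_populate_data(self, key):
        return self._shared.get(key)

    def set_shared_populate_data(self, key, value):
        self._shared[key] = value


def get_loaded_repre_ids(builder, scene_nodes):
    ids = builder.get_shared_populate_data("loaded_representation_ids")
    if ids is None:
        # First call: collect the ids from the scene and cache them.
        ids = {node["repre_id"] for node in scene_nodes if "repre_id" in node}
        builder.set_shared_populate_data("loaded_representation_ids", ids)
    return ids


builder = HypotheticalBuilder()
nodes = [{"repre_id": "a1"}, {"name": "no_id"}, {"repre_id": "b2"}]
print(get_loaded_repre_ids(builder, nodes))  # {'a1', 'b2'} (order may vary)
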
plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - # TODO do data validations and maybe updgrades if are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_load_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - repre_ids = self._get_loaded_repre_ids() - self.populate_load_placeholder(placeholder, repre_ids) - - def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - # TODO get from shared populate data! - nodes_init = placeholder.data["nodes_init"] - nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Loaded nodes: {}".format(nodes_loaded)) - if not nodes_loaded: - return - - placeholder.data["delete"] = True - - nodes_loaded = self._move_to_placeholder_group( - placeholder, nodes_loaded - ) - placeholder.data["last_loaded"] = nodes_loaded - refresh_nodes(nodes_loaded) - - # positioning of the loaded nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_loaded) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of loaded nodes - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) - - self._set_loaded_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new loaded nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_loaded) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_loaded, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the loaded - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_loaded, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in 
nodes_loaded: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_loaded): - """ - opening the placeholder's group and copying loaded nodes in it. - - Returns : - nodes_loaded (list): the new list of pasted nodes - """ - - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_loaded) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_loaded = nuke.selectedNodes() - return nodes_loaded - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is loaded.""" - - nodes_loaded = placeholder.data["last_loaded"] - loaded_backdrops = [] - bd_orders = set() - for node in nodes_loaded: - if isinstance(node, nuke.BackdropNode): - loaded_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in loaded_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes loaded with it) - - add Id to the attributes of all the other nodes - """ - - loaded_nodes = placeholder.data["last_loaded"] - loaded_nodes_set = set(loaded_nodes) - data = {"repre_id": str(placeholder.data["last_repre_id"])} - - for node in loaded_nodes: - node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(loaded_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_loaded_connections(self, placeholder): - """ - set inputs and outputs of loaded nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - loaded with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) diff --git a/server_addon/nuke/client/ayon_nuke/startup/__init__.py b/server_addon/nuke/client/ayon_nuke/startup/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py b/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py deleted file mode 100644 index 8072aae14f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py +++ /dev/null @@ -1,12 +0,0 @@ -import os - -from ayon_core.lib import Logger - - -def clear_rendered(dir_path): - log = Logger.get_logger(__name__) - - for _f in os.listdir(dir_path): - _f_path = os.path.join(dir_path, _f) - log.info("Removing: `{}`".format(_f_path)) - os.remove(_f_path) diff --git a/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py b/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py deleted file mode 100644 index 5b0f240a49..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py +++ /dev/null @@ -1,153 +0,0 @@ -""" AYON custom script for setting up write nodes for non-publish """ -import os -import nuke -import nukescripts -from ayon_core.pipeline import Anatomy, get_current_project_name -from ayon_nuke.api.lib import ( - set_node_knobs_from_settings, - get_nuke_imageio_settings -) - - -temp_rendering_path_template = ( - "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - -knobs_setting = { - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": True - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 186, - 35, - 35, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "bool", - "name": "create_directories", - "value": True - } - ] -} - - -class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): - """ Write Node's Knobs Settings Panel """ - def __init__(self): - nukescripts.PythonPanel.__init__(self, "Set Knobs Value(Write Node)") - - preset_name, _ = self.get_node_knobs_setting() - # create knobs - - self.selected_preset_name = nuke.Enumeration_Knob( - 'preset_selector', 'presets', preset_name) - # add knobs to panel - self.addKnob(self.selected_preset_name) - - def process(self): - """ Process the panel values. 
""" - write_selected_nodes = [ - selected_nodes for selected_nodes in nuke.selectedNodes() - if selected_nodes.Class() == "Write"] - - selected_preset = self.selected_preset_name.value() - ext = None - knobs = knobs_setting["knobs"] - preset_name, node_knobs_presets = ( - self.get_node_knobs_setting(selected_preset) - ) - - if selected_preset and preset_name: - if not node_knobs_presets: - nuke.message( - "No knobs value found in subset group.." - "\nDefault setting will be used..") - else: - knobs = node_knobs_presets - - ext_knob_list = [knob for knob in knobs if knob["name"] == "file_type"] - if not ext_knob_list: - nuke.message( - "ERROR: No file type found in the subset's knobs." - "\nPlease add one to complete setting up the node") - return - else: - for knob in ext_knob_list: - ext = knob["value"] - - anatomy = Anatomy(get_current_project_name()) - - frame_padding = anatomy.templates_obj.frame_padding - for write_node in write_selected_nodes: - # data for mapping the path - # TODO add more fill data - product_name = write_node["name"].value() - data = { - "work": os.getenv("AYON_WORKDIR"), - "subset": product_name, - "product": { - "name": product_name, - }, - "frame": "#" * frame_padding, - "ext": ext - } - file_path = temp_rendering_path_template.format(**data) - file_path = file_path.replace("\\", "/") - write_node["file"].setValue(file_path) - set_node_knobs_from_settings(write_node, knobs) - - def get_node_knobs_setting(self, selected_preset=None): - preset_name = [] - knobs_nodes = [] - settings = [ - node_settings for node_settings - in get_nuke_imageio_settings()["nodes"]["override_nodes"] - if node_settings["nuke_node_class"] == "Write" - and node_settings["subsets"] - ] - if not settings: - return - - for i, _ in enumerate(settings): - if selected_preset in settings[i]["subsets"]: - knobs_nodes = settings[i]["knobs"] - - for setting in settings: - # TODO change 'subsets' to 'product_names' in settings - for product_name in setting["subsets"]: - preset_name.append(product_name) - - return preset_name, knobs_nodes - - -def main(): - p_ = WriteNodeKnobSettingPanel() - if p_.showModalDialog(): - print(p_.process()) diff --git a/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py b/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py deleted file mode 100644 index 3e1430c3b1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py +++ /dev/null @@ -1,47 +0,0 @@ -""" AYON custom script for resetting read nodes start frame values """ - -import nuke -import nukescripts - - -class FrameSettingsPanel(nukescripts.PythonPanel): - """ Frame Settings Panel """ - def __init__(self): - nukescripts.PythonPanel.__init__(self, "Set Frame Start (Read Node)") - - # create knobs - self.frame = nuke.Int_Knob( - 'frame', 'Frame Number') - self.selected = nuke.Boolean_Knob("selection") - # add knobs to panel - self.addKnob(self.selected) - self.addKnob(self.frame) - - # set values - self.selected.setValue(False) - self.frame.setValue(nuke.root().firstFrame()) - - def process(self): - """ Process the panel values. 
""" - # get values - frame = self.frame.value() - if self.selected.value(): - # selected nodes processing - if not nuke.selectedNodes(): - return - for rn_ in nuke.selectedNodes(): - if rn_.Class() != "Read": - continue - rn_["frame_mode"].setValue("start_at") - rn_["frame"].setValue(str(frame)) - else: - # all nodes processing - for rn_ in nuke.allNodes(filter="Read"): - rn_["frame_mode"].setValue("start_at") - rn_["frame"].setValue(str(frame)) - - -def main(): - p_ = FrameSettingsPanel() - if p_.showModalDialog(): - print(p_.process()) diff --git a/server_addon/nuke/client/ayon_nuke/startup/menu.py b/server_addon/nuke/client/ayon_nuke/startup/menu.py deleted file mode 100644 index c3dd8cda8f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/menu.py +++ /dev/null @@ -1,5 +0,0 @@ -from ayon_core.pipeline import install_host -from ayon_nuke.api import NukeHost - -host = NukeHost() -install_host(host) diff --git a/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py b/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py deleted file mode 100644 index 8a8ffb8d3d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py +++ /dev/null @@ -1,151 +0,0 @@ -import re -import os -import glob -import nuke -from ayon_core.lib import Logger -log = Logger.get_logger(__name__) - -SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v', - 'm2v'] - - -def evaluate_filepath_new( - k_value, k_eval, project_dir, first_frame, allow_relative): - - # get combined relative path - combined_relative_path = None - if k_eval is not None and project_dir is not None: - combined_relative_path = os.path.abspath( - os.path.join(project_dir, k_eval)) - combined_relative_path = combined_relative_path.replace('\\', '/') - filetype = combined_relative_path.split('.')[-1] - frame_number = re.findall(r'\d+', combined_relative_path)[-1] - basename = combined_relative_path[: combined_relative_path.rfind( - frame_number)] - filepath_glob = basename + '*' + filetype - glob_search_results = glob.glob(filepath_glob) - if len(glob_search_results) <= 0: - combined_relative_path = None - - try: - # k_value = k_value % first_frame - if os.path.isdir(os.path.basename(k_value)): - # doesn't check for file, only parent dir - filepath = k_value - elif os.path.exists(k_eval): - filepath = k_eval - elif not isinstance(project_dir, type(None)) and \ - not isinstance(combined_relative_path, type(None)): - filepath = combined_relative_path - - filepath = os.path.abspath(filepath) - except Exception as E: - log.error("Cannot create Read node. Perhaps it needs to be \ - rendered first :) Error: `{}`".format(E)) - return None - - filepath = filepath.replace('\\', '/') - # assumes last number is a sequence counter - current_frame = re.findall(r'\d+', filepath)[-1] - padding = len(current_frame) - basename = filepath[: filepath.rfind(current_frame)] - filetype = filepath.split('.')[-1] - - # sequence or not? - if filetype in SINGLE_FILE_FORMATS: - pass - else: - # Image sequence needs hashes - # to do still with no number not handled - filepath = basename + '#' * padding + '.' + filetype - - # relative path? 
make it relative again - if allow_relative: - if (not isinstance(project_dir, type(None))) and project_dir != "": - filepath = filepath.replace(project_dir, '.') - - # get first and last frame from disk - frames = [] - firstframe = 0 - lastframe = 0 - filepath_glob = basename + '*' + filetype - glob_search_results = glob.glob(filepath_glob) - for f in glob_search_results: - frame = re.findall(r'\d+', f)[-1] - frames.append(frame) - frames = sorted(frames) - firstframe = frames[0] - lastframe = frames[len(frames) - 1] - - if int(lastframe) < 0: - lastframe = firstframe - - return filepath, firstframe, lastframe - - -def create_read_node(ndata, comp_start): - read = nuke.createNode('Read', 'file "' + ndata['filepath'] + '"') - read.knob('colorspace').setValue(int(ndata['colorspace'])) - read.knob('raw').setValue(ndata['rawdata']) - read.knob('first').setValue(int(ndata['firstframe'])) - read.knob('last').setValue(int(ndata['lastframe'])) - read.knob('origfirst').setValue(int(ndata['firstframe'])) - read.knob('origlast').setValue(int(ndata['lastframe'])) - if comp_start == int(ndata['firstframe']): - read.knob('frame_mode').setValue("1") - read.knob('frame').setValue(str(comp_start)) - else: - read.knob('frame_mode').setValue("0") - read.knob('xpos').setValue(ndata['new_xpos']) - read.knob('ypos').setValue(ndata['new_ypos']) - nuke.inputs(read, 0) - return - - -def write_to_read(gn, - allow_relative=False): - - comp_start = nuke.Root().knob('first_frame').value() - project_dir = nuke.Root().knob('project_directory').getValue() - if not os.path.exists(project_dir): - project_dir = nuke.Root().knob('project_directory').evaluate() - - group_read_nodes = [] - with gn: - height = gn.screenHeight() # get group height and position - new_xpos = int(gn.knob('xpos').value()) - new_ypos = int(gn.knob('ypos').value()) + height + 20 - group_writes = [n for n in nuke.allNodes() if n.Class() == "Write"] - if group_writes != []: - # there can be only 1 write node, taking first - n = group_writes[0] - - if n.knob('file') is not None: - myfile, firstFrame, lastFrame = evaluate_filepath_new( - n.knob('file').getValue(), - n.knob('file').evaluate(), - project_dir, - comp_start, - allow_relative - ) - if not myfile: - return - - # get node data - ndata = { - 'filepath': myfile, - 'firstframe': int(firstFrame), - 'lastframe': int(lastFrame), - 'new_xpos': new_xpos, - 'new_ypos': new_ypos, - 'colorspace': n.knob('colorspace').getValue(), - 'rawdata': n.knob('raw').value(), - 'write_frame_mode': str(n.knob('frame_mode').value()), - 'write_frame': n.knob('frame').value() - } - group_read_nodes.append(ndata) - - # create reads in one go - for oneread in group_read_nodes: - # create read node - create_read_node(oneread, comp_start) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py deleted file mode 100644 index 03f3b29ee7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Copyright 2007 Google Inc. All Rights Reserved. - -__version__ = '3.20.1' diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py deleted file mode 100644 index 9121193d11..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/any.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _ANY._serialized_start=46 - _ANY._serialized_end=84 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py deleted file mode 100644 index 1721b10a75..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/api.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 -from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _API._serialized_start=113 - _API._serialized_end=370 - _METHOD._serialized_start=373 - _METHOD._serialized_end=586 - _MIXIN._serialized_start=588 - _MIXIN._serialized_end=623 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py deleted file mode 100644 index 715a891370..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/compiler/plugin.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' - _VERSION._serialized_start=101 - _VERSION._serialized_end=171 - _CODEGENERATORREQUEST._serialized_start=174 - _CODEGENERATORREQUEST._serialized_end=360 - _CODEGENERATORRESPONSE._serialized_start=363 - _CODEGENERATORRESPONSE._serialized_end=684 - _CODEGENERATORRESPONSE_FILE._serialized_start=499 - _CODEGENERATORRESPONSE_FILE._serialized_end=626 - _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 - _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py deleted file mode 100644 index ad70be9a11..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py +++ /dev/null @@ -1,1224 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Descriptors essentially contain exactly the information found in a .proto -file, in types that make this information accessible in Python. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import threading -import warnings - -from google.protobuf.internal import api_implementation - -_USE_C_DESCRIPTORS = False -if api_implementation.Type() == 'cpp': - # Used by MakeDescriptor in cpp mode - import binascii - import os - from google.protobuf.pyext import _message - _USE_C_DESCRIPTORS = True - - -class Error(Exception): - """Base error for this module.""" - - -class TypeTransformationError(Error): - """Error transforming between python proto type and corresponding C++ type.""" - - -if _USE_C_DESCRIPTORS: - # This metaclass allows to override the behavior of code like - # isinstance(my_descriptor, FieldDescriptor) - # and make it return True when the descriptor is an instance of the extension - # type written in C++. - class DescriptorMetaclass(type): - def __instancecheck__(cls, obj): - if super(DescriptorMetaclass, cls).__instancecheck__(obj): - return True - if isinstance(obj, cls._C_DESCRIPTOR_CLASS): - return True - return False -else: - # The standard metaclass; nothing changes. - DescriptorMetaclass = type - - -class _Lock(object): - """Wrapper class of threading.Lock(), which is allowed by 'with'.""" - - def __new__(cls): - self = object.__new__(cls) - self._lock = threading.Lock() # pylint: disable=protected-access - return self - - def __enter__(self): - self._lock.acquire() - - def __exit__(self, exc_type, exc_value, exc_tb): - self._lock.release() - - -_lock = threading.Lock() - - -def _Deprecated(name): - if _Deprecated.count > 0: - _Deprecated.count -= 1 - warnings.warn( - 'Call to deprecated create function %s(). Note: Create unlinked ' - 'descriptors is going to go away. Please use get/find descriptors from ' - 'generated code or query the descriptor_pool.' - % name, - category=DeprecationWarning, stacklevel=3) - - -# Deprecated warnings will print 100 times at most which should be enough for -# users to notice and do not cause timeout. -_Deprecated.count = 100 - - -_internal_create_key = object() - - -class DescriptorBase(metaclass=DescriptorMetaclass): - - """Descriptors base class. - - This class is the base of all descriptor classes. 
It provides common options - related functionality. - - Attributes: - has_options: True if the descriptor has non-default options. Usually it - is not necessary to read this -- just call GetOptions() which will - happily return the default instance. However, it's sometimes useful - for efficiency, and also useful inside the protobuf implementation to - avoid some bootstrapping issues. - """ - - if _USE_C_DESCRIPTORS: - # The class, or tuple of classes, that are considered as "virtual - # subclasses" of this descriptor class. - _C_DESCRIPTOR_CLASS = () - - def __init__(self, options, serialized_options, options_class_name): - """Initialize the descriptor given its options message and the name of the - class of the options message. The name of the class is required in case - the options message is None and has to be created. - """ - self._options = options - self._options_class_name = options_class_name - self._serialized_options = serialized_options - - # Does this descriptor have non-default options? - self.has_options = (options is not None) or (serialized_options is not None) - - def _SetOptions(self, options, options_class_name): - """Sets the descriptor's options - - This function is used in generated proto2 files to update descriptor - options. It must not be used outside proto2. - """ - self._options = options - self._options_class_name = options_class_name - - # Does this descriptor have non-default options? - self.has_options = options is not None - - def GetOptions(self): - """Retrieves descriptor options. - - This method returns the options set or creates the default options for the - descriptor. - """ - if self._options: - return self._options - - from google.protobuf import descriptor_pb2 - try: - options_class = getattr(descriptor_pb2, - self._options_class_name) - except AttributeError: - raise RuntimeError('Unknown options class name %s!' % - (self._options_class_name)) - - with _lock: - if self._serialized_options is None: - self._options = options_class() - else: - self._options = _ParseOptions(options_class(), - self._serialized_options) - - return self._options - - -class _NestedDescriptorBase(DescriptorBase): - """Common class for descriptors that can be nested.""" - - def __init__(self, options, options_class_name, name, full_name, - file, containing_type, serialized_start=None, - serialized_end=None, serialized_options=None): - """Constructor. - - Args: - options: Protocol message options or None - to use default message options. - options_class_name (str): The class name of the above options. - name (str): Name of this protocol message type. - full_name (str): Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - file (FileDescriptor): Reference to file info. - containing_type: if provided, this is a nested descriptor, with this - descriptor as parent, otherwise None. - serialized_start: The start index (inclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_end: The end index (exclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_options: Protocol message serialized options or None. - """ - super(_NestedDescriptorBase, self).__init__( - options, serialized_options, options_class_name) - - self.name = name - # TODO(falk): Add function to calculate full_name instead of having it in - # memory? 
- self.full_name = full_name - self.file = file - self.containing_type = containing_type - - self._serialized_start = serialized_start - self._serialized_end = serialized_end - - def CopyToProto(self, proto): - """Copies this to the matching proto in descriptor_pb2. - - Args: - proto: An empty proto instance from descriptor_pb2. - - Raises: - Error: If self couldn't be serialized, due to to few constructor - arguments. - """ - if (self.file is not None and - self._serialized_start is not None and - self._serialized_end is not None): - proto.ParseFromString(self.file.serialized_pb[ - self._serialized_start:self._serialized_end]) - else: - raise Error('Descriptor does not contain serialization.') - - -class Descriptor(_NestedDescriptorBase): - - """Descriptor for a protocol message type. - - Attributes: - name (str): Name of this protocol message type. - full_name (str): Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - containing_type (Descriptor): Reference to the descriptor of the type - containing us, or None if this is top-level. - fields (list[FieldDescriptor]): Field descriptors for all fields in - this type. - fields_by_number (dict(int, FieldDescriptor)): Same - :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed - by "number" attribute in each FieldDescriptor. - fields_by_name (dict(str, FieldDescriptor)): Same - :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by - "name" attribute in each :class:`FieldDescriptor`. - nested_types (list[Descriptor]): Descriptor references - for all protocol message types nested within this one. - nested_types_by_name (dict(str, Descriptor)): Same Descriptor - objects as in :attr:`nested_types`, but indexed by "name" attribute - in each Descriptor. - enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references - for all enums contained within this type. - enum_types_by_name (dict(str, EnumDescriptor)): Same - :class:`EnumDescriptor` objects as in :attr:`enum_types`, but - indexed by "name" attribute in each EnumDescriptor. - enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping - from enum value name to :class:`EnumValueDescriptor` for that value. - extensions (list[FieldDescriptor]): All extensions defined directly - within this message type (NOT within a nested type). - extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor - objects as :attr:`extensions`, but indexed by "name" attribute of each - FieldDescriptor. - is_extendable (bool): Does this type define any extension ranges? - oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields - in this message. - oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in - :attr:`oneofs`, but indexed by "name" attribute. - file (FileDescriptor): Reference to file descriptor. 
- - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.Descriptor - - def __new__( - cls, - name=None, - full_name=None, - filename=None, - containing_type=None, - fields=None, - nested_types=None, - enum_types=None, - extensions=None, - options=None, - serialized_options=None, - is_extendable=True, - extension_ranges=None, - oneofs=None, - file=None, # pylint: disable=redefined-builtin - serialized_start=None, - serialized_end=None, - syntax=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindMessageTypeByName(full_name) - - # NOTE(tmarek): The file argument redefining a builtin is nothing we can - # fix right now since we don't know how many clients already rely on the - # name of the argument. - def __init__(self, name, full_name, filename, containing_type, fields, - nested_types, enum_types, extensions, options=None, - serialized_options=None, - is_extendable=True, extension_ranges=None, oneofs=None, - file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin - syntax=None, create_key=None): - """Arguments to __init__() are as described in the description - of Descriptor fields above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - if create_key is not _internal_create_key: - _Deprecated('Descriptor') - - super(Descriptor, self).__init__( - options, 'MessageOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - # We have fields in addition to fields_by_name and fields_by_number, - # so that: - # 1. Clients can index fields by "order in which they're listed." - # 2. Clients can easily iterate over all fields with the terse - # syntax: for f in descriptor.fields: ... - self.fields = fields - for field in self.fields: - field.containing_type = self - self.fields_by_number = dict((f.number, f) for f in fields) - self.fields_by_name = dict((f.name, f) for f in fields) - self._fields_by_camelcase_name = None - - self.nested_types = nested_types - for nested_type in nested_types: - nested_type.containing_type = self - self.nested_types_by_name = dict((t.name, t) for t in nested_types) - - self.enum_types = enum_types - for enum_type in self.enum_types: - enum_type.containing_type = self - self.enum_types_by_name = dict((t.name, t) for t in enum_types) - self.enum_values_by_name = dict( - (v.name, v) for t in enum_types for v in t.values) - - self.extensions = extensions - for extension in self.extensions: - extension.extension_scope = self - self.extensions_by_name = dict((f.name, f) for f in extensions) - self.is_extendable = is_extendable - self.extension_ranges = extension_ranges - self.oneofs = oneofs if oneofs is not None else [] - self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) - for oneof in self.oneofs: - oneof.containing_type = self - self.syntax = syntax or "proto2" - - @property - def fields_by_camelcase_name(self): - """Same FieldDescriptor objects as in :attr:`fields`, but indexed by - :attr:`FieldDescriptor.camelcase_name`. - """ - if self._fields_by_camelcase_name is None: - self._fields_by_camelcase_name = dict( - (f.camelcase_name, f) for f in self.fields) - return self._fields_by_camelcase_name - - def EnumValueName(self, enum, value): - """Returns the string name of an enum value. - - This is just a small helper method to simplify a common operation. 
- - Args: - enum: string name of the Enum. - value: int, value of the enum. - - Returns: - string name of the enum value. - - Raises: - KeyError if either the Enum doesn't exist or the value is not a valid - value for the enum. - """ - return self.enum_types_by_name[enum].values_by_number[value].name - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.DescriptorProto. - - Args: - proto: An empty descriptor_pb2.DescriptorProto. - """ - # This function is overridden to give a better doc comment. - super(Descriptor, self).CopyToProto(proto) - - -# TODO(robinson): We should have aggressive checking here, -# for example: -# * If you specify a repeated field, you should not be allowed -# to specify a default value. -# * [Other examples here as needed]. -# -# TODO(robinson): for this and other *Descriptor classes, we -# might also want to lock things down aggressively (e.g., -# prevent clients from setting the attributes). Having -# stronger invariants here in general will reduce the number -# of runtime checks we must do in reflection.py... -class FieldDescriptor(DescriptorBase): - - """Descriptor for a single field in a .proto file. - - Attributes: - name (str): Name of this field, exactly as it appears in .proto. - full_name (str): Name of this field, including containing scope. This is - particularly relevant for extensions. - index (int): Dense, 0-indexed index giving the order that this - field textually appears within its message in the .proto file. - number (int): Tag number declared for this field in the .proto file. - - type (int): (One of the TYPE_* constants below) Declared type. - cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to - represent this field. - - label (int): (One of the LABEL_* constants below) Tells whether this - field is optional, required, or repeated. - has_default_value (bool): True if this field has a default value defined, - otherwise false. - default_value (Varies): Default value of this field. Only - meaningful for non-repeated scalar fields. Repeated fields - should always set this to [], and non-repeated composite - fields should always set this to None. - - containing_type (Descriptor): Descriptor of the protocol message - type that contains this field. Set by the Descriptor constructor - if we're passed into one. - Somewhat confusingly, for extension fields, this is the - descriptor of the EXTENDED message, not the descriptor - of the message containing this field. (See is_extension and - extension_scope below). - message_type (Descriptor): If a composite field, a descriptor - of the message type contained in this field. Otherwise, this is None. - enum_type (EnumDescriptor): If this field contains an enum, a - descriptor of that enum. Otherwise, this is None. - - is_extension: True iff this describes an extension field. - extension_scope (Descriptor): Only meaningful if is_extension is True. - Gives the message that immediately contains this extension field. - Will be None iff we're a top-level (file-level) extension field. - - options (descriptor_pb2.FieldOptions): Protocol message field options or - None to use default field options. - - containing_oneof (OneofDescriptor): If the field is a member of a oneof - union, contains its descriptor. Otherwise, None. - - file (FileDescriptor): Reference to file descriptor. - """ - - # Must be consistent with C++ FieldDescriptor::Type enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. 
- TYPE_DOUBLE = 1 - TYPE_FLOAT = 2 - TYPE_INT64 = 3 - TYPE_UINT64 = 4 - TYPE_INT32 = 5 - TYPE_FIXED64 = 6 - TYPE_FIXED32 = 7 - TYPE_BOOL = 8 - TYPE_STRING = 9 - TYPE_GROUP = 10 - TYPE_MESSAGE = 11 - TYPE_BYTES = 12 - TYPE_UINT32 = 13 - TYPE_ENUM = 14 - TYPE_SFIXED32 = 15 - TYPE_SFIXED64 = 16 - TYPE_SINT32 = 17 - TYPE_SINT64 = 18 - MAX_TYPE = 18 - - # Must be consistent with C++ FieldDescriptor::CppType enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - CPPTYPE_INT32 = 1 - CPPTYPE_INT64 = 2 - CPPTYPE_UINT32 = 3 - CPPTYPE_UINT64 = 4 - CPPTYPE_DOUBLE = 5 - CPPTYPE_FLOAT = 6 - CPPTYPE_BOOL = 7 - CPPTYPE_ENUM = 8 - CPPTYPE_STRING = 9 - CPPTYPE_MESSAGE = 10 - MAX_CPPTYPE = 10 - - _PYTHON_TO_CPP_PROTO_TYPE_MAP = { - TYPE_DOUBLE: CPPTYPE_DOUBLE, - TYPE_FLOAT: CPPTYPE_FLOAT, - TYPE_ENUM: CPPTYPE_ENUM, - TYPE_INT64: CPPTYPE_INT64, - TYPE_SINT64: CPPTYPE_INT64, - TYPE_SFIXED64: CPPTYPE_INT64, - TYPE_UINT64: CPPTYPE_UINT64, - TYPE_FIXED64: CPPTYPE_UINT64, - TYPE_INT32: CPPTYPE_INT32, - TYPE_SFIXED32: CPPTYPE_INT32, - TYPE_SINT32: CPPTYPE_INT32, - TYPE_UINT32: CPPTYPE_UINT32, - TYPE_FIXED32: CPPTYPE_UINT32, - TYPE_BYTES: CPPTYPE_STRING, - TYPE_STRING: CPPTYPE_STRING, - TYPE_BOOL: CPPTYPE_BOOL, - TYPE_MESSAGE: CPPTYPE_MESSAGE, - TYPE_GROUP: CPPTYPE_MESSAGE - } - - # Must be consistent with C++ FieldDescriptor::Label enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - LABEL_OPTIONAL = 1 - LABEL_REQUIRED = 2 - LABEL_REPEATED = 3 - MAX_LABEL = 3 - - # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, - # and kLastReservedNumber in descriptor.h - MAX_FIELD_NUMBER = (1 << 29) - 1 - FIRST_RESERVED_FIELD_NUMBER = 19000 - LAST_RESERVED_FIELD_NUMBER = 19999 - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FieldDescriptor - - def __new__(cls, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None, create_key=None): # pylint: disable=redefined-builtin - _message.Message._CheckCalledFromGeneratedFile() - if is_extension: - return _message.default_pool.FindExtensionByName(full_name) - else: - return _message.default_pool.FindFieldByName(full_name) - - def __init__(self, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None, create_key=None): # pylint: disable=redefined-builtin - """The arguments are as described in the description of FieldDescriptor - attributes above. - - Note that containing_type may be None, and may be set later if necessary - (to deal with circular references between message types, for example). - Likewise for extension_scope. 
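# --- Illustrative sketch only; not part of the deleted vendored file above. ---
# Assuming a standard google.protobuf runtime, the TYPE_*/CPPTYPE_*/LABEL_*
# constants listed above are what generated FieldDescriptor instances carry in
# their .type, .cpp_type and .label attributes, and ProtoTypeToCppProtoType()
# (defined on this class) maps between the two constant families.
from google.protobuf import descriptor, descriptor_pb2

# 'number' is declared as `optional int32 number = 3;` in descriptor.proto.
fd = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR.fields_by_name['number']
assert fd.type == descriptor.FieldDescriptor.TYPE_INT32
assert fd.cpp_type == descriptor.FieldDescriptor.CPPTYPE_INT32
assert fd.label == descriptor.FieldDescriptor.LABEL_OPTIONAL
assert (descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
    descriptor.FieldDescriptor.TYPE_STRING)
    == descriptor.FieldDescriptor.CPPTYPE_STRING)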
- """ - if create_key is not _internal_create_key: - _Deprecated('FieldDescriptor') - - super(FieldDescriptor, self).__init__( - options, serialized_options, 'FieldOptions') - self.name = name - self.full_name = full_name - self.file = file - self._camelcase_name = None - if json_name is None: - self.json_name = _ToJsonName(name) - else: - self.json_name = json_name - self.index = index - self.number = number - self.type = type - self.cpp_type = cpp_type - self.label = label - self.has_default_value = has_default_value - self.default_value = default_value - self.containing_type = containing_type - self.message_type = message_type - self.enum_type = enum_type - self.is_extension = is_extension - self.extension_scope = extension_scope - self.containing_oneof = containing_oneof - if api_implementation.Type() == 'cpp': - if is_extension: - self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) - else: - self._cdescriptor = _message.default_pool.FindFieldByName(full_name) - else: - self._cdescriptor = None - - @property - def camelcase_name(self): - """Camelcase name of this field. - - Returns: - str: the name in CamelCase. - """ - if self._camelcase_name is None: - self._camelcase_name = _ToCamelCase(self.name) - return self._camelcase_name - - @property - def has_presence(self): - """Whether the field distinguishes between unpopulated and default values. - - Raises: - RuntimeError: singular field that is not linked with message nor file. - """ - if self.label == FieldDescriptor.LABEL_REPEATED: - return False - if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or - self.containing_oneof): - return True - if hasattr(self.file, 'syntax'): - return self.file.syntax == 'proto2' - if hasattr(self.message_type, 'syntax'): - return self.message_type.syntax == 'proto2' - raise RuntimeError( - 'has_presence is not ready to use because field %s is not' - ' linked with message type nor file' % self.full_name) - - @staticmethod - def ProtoTypeToCppProtoType(proto_type): - """Converts from a Python proto type to a C++ Proto Type. - - The Python ProtocolBuffer classes specify both the 'Python' datatype and the - 'C++' datatype - and they're not the same. This helper method should - translate from one to another. - - Args: - proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) - Returns: - int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. - Raises: - TypeTransformationError: when the Python proto type isn't known. - """ - try: - return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] - except KeyError: - raise TypeTransformationError('Unknown proto_type: %s' % proto_type) - - -class EnumDescriptor(_NestedDescriptorBase): - - """Descriptor for an enum defined in a .proto file. - - Attributes: - name (str): Name of the enum type. - full_name (str): Full name of the type, including package name - and any enclosing type(s). - - values (list[EnumValueDescriptor]): List of the values - in this enum. - values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, - but indexed by the "name" field of each EnumValueDescriptor. - values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, - but indexed by the "number" field of each EnumValueDescriptor. - containing_type (Descriptor): Descriptor of the immediate containing - type of this enum, or None if this is an enum defined at the - top level in a .proto file. Set by Descriptor's constructor - if we're passed into one. - file (FileDescriptor): Reference to file descriptor. 
- options (descriptor_pb2.EnumOptions): Enum options message or - None to use default enum options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumDescriptor - - def __new__(cls, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindEnumTypeByName(full_name) - - def __init__(self, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - """Arguments are as described in the attribute description above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - if create_key is not _internal_create_key: - _Deprecated('EnumDescriptor') - - super(EnumDescriptor, self).__init__( - options, 'EnumOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - self.values = values - for value in self.values: - value.type = self - self.values_by_name = dict((v.name, v) for v in values) - # Values are reversed to ensure that the first alias is retained. - self.values_by_number = dict((v.number, v) for v in reversed(values)) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.EnumDescriptorProto. - - Args: - proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. - """ - # This function is overridden to give a better doc comment. - super(EnumDescriptor, self).CopyToProto(proto) - - -class EnumValueDescriptor(DescriptorBase): - - """Descriptor for a single value within an enum. - - Attributes: - name (str): Name of this value. - index (int): Dense, 0-indexed index giving the order that this - value appears textually within its enum in the .proto file. - number (int): Actual number assigned to this enum value. - type (EnumDescriptor): :class:`EnumDescriptor` to which this value - belongs. Set by :class:`EnumDescriptor`'s constructor if we're - passed into one. - options (descriptor_pb2.EnumValueOptions): Enum value options message or - None to use default enum value options options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor - - def __new__(cls, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - # There is no way we can build a complete EnumValueDescriptor with the - # given parameters (the name of the Enum is not known, for example). - # Fortunately generated files just pass it to the EnumDescriptor() - # constructor, which will ignore it, so returning None is good enough. 
- return None - - def __init__(self, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None, create_key=None): - """Arguments are as described in the attribute description above.""" - if create_key is not _internal_create_key: - _Deprecated('EnumValueDescriptor') - - super(EnumValueDescriptor, self).__init__( - options, serialized_options, 'EnumValueOptions') - self.name = name - self.index = index - self.number = number - self.type = type - - -class OneofDescriptor(DescriptorBase): - """Descriptor for a oneof field. - - Attributes: - name (str): Name of the oneof field. - full_name (str): Full name of the oneof field, including package name. - index (int): 0-based index giving the order of the oneof field inside - its containing type. - containing_type (Descriptor): :class:`Descriptor` of the protocol message - type that contains this field. Set by the :class:`Descriptor` constructor - if we're passed into one. - fields (list[FieldDescriptor]): The list of field descriptors this - oneof can contain. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.OneofDescriptor - - def __new__( - cls, name, full_name, index, containing_type, fields, options=None, - serialized_options=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindOneofByName(full_name) - - def __init__( - self, name, full_name, index, containing_type, fields, options=None, - serialized_options=None, create_key=None): - """Arguments are as described in the attribute description above.""" - if create_key is not _internal_create_key: - _Deprecated('OneofDescriptor') - - super(OneofDescriptor, self).__init__( - options, serialized_options, 'OneofOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_type = containing_type - self.fields = fields - - -class ServiceDescriptor(_NestedDescriptorBase): - - """Descriptor for a service. - - Attributes: - name (str): Name of the service. - full_name (str): Full name of the service, including package name. - index (int): 0-indexed index giving the order that this services - definition appears within the .proto file. - methods (list[MethodDescriptor]): List of methods provided by this - service. - methods_by_name (dict(str, MethodDescriptor)): Same - :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but - indexed by "name" attribute in each :class:`MethodDescriptor`. - options (descriptor_pb2.ServiceOptions): Service options message or - None to use default service options. - file (FileDescriptor): Reference to file info. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor - - def __new__( - cls, - name=None, - full_name=None, - index=None, - methods=None, - options=None, - serialized_options=None, - file=None, # pylint: disable=redefined-builtin - serialized_start=None, - serialized_end=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindServiceByName(full_name) - - def __init__(self, name, full_name, index, methods, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - if create_key is not _internal_create_key: - _Deprecated('ServiceDescriptor') - - super(ServiceDescriptor, self).__init__( - options, 'ServiceOptions', name, full_name, file, - None, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - self.index = index - self.methods = methods - self.methods_by_name = dict((m.name, m) for m in methods) - # Set the containing service for each method in this service. - for method in self.methods: - method.containing_service = self - - def FindMethodByName(self, name): - """Searches for the specified method, and returns its descriptor. - - Args: - name (str): Name of the method. - Returns: - MethodDescriptor or None: the descriptor for the requested method, if - found. - """ - return self.methods_by_name.get(name, None) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.ServiceDescriptorProto. - - Args: - proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. - """ - # This function is overridden to give a better doc comment. - super(ServiceDescriptor, self).CopyToProto(proto) - - -class MethodDescriptor(DescriptorBase): - - """Descriptor for a method in a service. - - Attributes: - name (str): Name of the method within the service. - full_name (str): Full name of method. - index (int): 0-indexed index of the method inside the service. - containing_service (ServiceDescriptor): The service that contains this - method. - input_type (Descriptor): The descriptor of the message that this method - accepts. - output_type (Descriptor): The descriptor of the message that this method - returns. - client_streaming (bool): Whether this method uses client streaming. - server_streaming (bool): Whether this method uses server streaming. - options (descriptor_pb2.MethodOptions or None): Method options message, or - None to use default method options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.MethodDescriptor - - def __new__(cls, - name, - full_name, - index, - containing_service, - input_type, - output_type, - client_streaming=False, - server_streaming=False, - options=None, - serialized_options=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindMethodByName(full_name) - - def __init__(self, - name, - full_name, - index, - containing_service, - input_type, - output_type, - client_streaming=False, - server_streaming=False, - options=None, - serialized_options=None, - create_key=None): - """The arguments are as described in the description of MethodDescriptor - attributes above. - - Note that containing_service may be None, and may be set later if necessary. 
- """ - if create_key is not _internal_create_key: - _Deprecated('MethodDescriptor') - - super(MethodDescriptor, self).__init__( - options, serialized_options, 'MethodOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_service = containing_service - self.input_type = input_type - self.output_type = output_type - self.client_streaming = client_streaming - self.server_streaming = server_streaming - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.MethodDescriptorProto. - - Args: - proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. - - Raises: - Error: If self couldn't be serialized, due to too few constructor - arguments. - """ - if self.containing_service is not None: - from google.protobuf import descriptor_pb2 - service_proto = descriptor_pb2.ServiceDescriptorProto() - self.containing_service.CopyToProto(service_proto) - proto.CopyFrom(service_proto.method[self.index]) - else: - raise Error('Descriptor does not contain a service.') - - -class FileDescriptor(DescriptorBase): - """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. - - Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and - :attr:`dependencies` fields are only set by the - :py:mod:`google.protobuf.message_factory` module, and not by the generated - proto code. - - Attributes: - name (str): Name of file, relative to root of source tree. - package (str): Name of the package - syntax (str): string indicating syntax of the file (can be "proto2" or - "proto3") - serialized_pb (bytes): Byte string of serialized - :class:`descriptor_pb2.FileDescriptorProto`. - dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` - objects this :class:`FileDescriptor` depends on. - public_dependencies (list[FileDescriptor]): A subset of - :attr:`dependencies`, which were declared as "public". - message_types_by_name (dict(str, Descriptor)): Mapping from message names - to their :class:`Descriptor`. - enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to - their :class:`EnumDescriptor`. - extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension - names declared at file scope to their :class:`FieldDescriptor`. - services_by_name (dict(str, ServiceDescriptor)): Mapping from services' - names to their :class:`ServiceDescriptor`. - pool (DescriptorPool): The pool this descriptor belongs to. When not - passed to the constructor, the global default pool is used. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FileDescriptor - - def __new__(cls, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None, create_key=None): - # FileDescriptor() is called from various places, not only from generated - # files, to register dynamic proto files and messages. 
- # pylint: disable=g-explicit-bool-comparison - if serialized_pb == b'': - # Cpp generated code must be linked in if serialized_pb is '' - try: - return _message.default_pool.FindFileByName(name) - except KeyError: - raise RuntimeError('Please link in cpp generated lib for %s' % (name)) - elif serialized_pb: - return _message.default_pool.AddSerializedFile(serialized_pb) - else: - return super(FileDescriptor, cls).__new__(cls) - - def __init__(self, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None, create_key=None): - """Constructor.""" - if create_key is not _internal_create_key: - _Deprecated('FileDescriptor') - - super(FileDescriptor, self).__init__( - options, serialized_options, 'FileOptions') - - if pool is None: - from google.protobuf import descriptor_pool - pool = descriptor_pool.Default() - self.pool = pool - self.message_types_by_name = {} - self.name = name - self.package = package - self.syntax = syntax or "proto2" - self.serialized_pb = serialized_pb - - self.enum_types_by_name = {} - self.extensions_by_name = {} - self.services_by_name = {} - self.dependencies = (dependencies or []) - self.public_dependencies = (public_dependencies or []) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.FileDescriptorProto. - - Args: - proto: An empty descriptor_pb2.FileDescriptorProto. - """ - proto.ParseFromString(self.serialized_pb) - - -def _ParseOptions(message, string): - """Parses serialized options. - - This helper function is used to parse serialized options in generated - proto2 files. It must not be used outside proto2. - """ - message.ParseFromString(string) - return message - - -def _ToCamelCase(name): - """Converts name to camel-case and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - if result: - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - # Lower-case the first letter. - if result and result[0].isupper(): - result[0] = result[0].lower() - return ''.join(result) - - -def _OptionsOrNone(descriptor_proto): - """Returns the value of the field `options`, or None if it is not set.""" - if descriptor_proto.HasField('options'): - return descriptor_proto.options - else: - return None - - -def _ToJsonName(name): - """Converts name to Json name and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - return ''.join(result) - - -def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, - syntax=None): - """Make a protobuf Descriptor given a DescriptorProto protobuf. - - Handles nested descriptors. Note that this is limited to the scope of defining - a message inside of another message. Composite fields can currently only be - resolved if the message is defined in the same scope as the field. - - Args: - desc_proto: The descriptor_pb2.DescriptorProto protobuf message. - package: Optional package name for the new message Descriptor (string). - build_file_if_cpp: Update the C++ descriptor pool if api matches. - Set to False on recursion, so no duplicates are created. - syntax: The syntax/semantics that should be used. Set to "proto3" to get - proto3 field presence semantics. - Returns: - A Descriptor for protobuf messages. 
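# --- Illustrative sketch only; not part of the deleted vendored file above. ---
# Assuming a standard google.protobuf runtime, MakeDescriptor() as documented
# above builds a message Descriptor at runtime from a hand-written
# DescriptorProto; the 'Pair' message and 'example' package are invented here.
from google.protobuf import descriptor, descriptor_pb2

proto = descriptor_pb2.DescriptorProto()
proto.name = 'Pair'
proto.field.add(name='key', number=1,
                type=descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
                label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
proto.field.add(name='value', number=2,
                type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
                label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
pair_desc = descriptor.MakeDescriptor(proto, package='example')
print(pair_desc.full_name)               # -> example.Pair
print(sorted(pair_desc.fields_by_name))  # -> ['key', 'value']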
- """ - if api_implementation.Type() == 'cpp' and build_file_if_cpp: - # The C++ implementation requires all descriptors to be backed by the same - # definition in the C++ descriptor pool. To do this, we build a - # FileDescriptorProto with the same definition as this descriptor and build - # it into the pool. - from google.protobuf import descriptor_pb2 - file_descriptor_proto = descriptor_pb2.FileDescriptorProto() - file_descriptor_proto.message_type.add().MergeFrom(desc_proto) - - # Generate a random name for this proto file to prevent conflicts with any - # imported ones. We need to specify a file name so the descriptor pool - # accepts our FileDescriptorProto, but it is not important what that file - # name is actually set to. - proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') - - if package: - file_descriptor_proto.name = os.path.join(package.replace('.', '/'), - proto_name + '.proto') - file_descriptor_proto.package = package - else: - file_descriptor_proto.name = proto_name + '.proto' - - _message.default_pool.Add(file_descriptor_proto) - result = _message.default_pool.FindFileByName(file_descriptor_proto.name) - - if _USE_C_DESCRIPTORS: - return result.message_types_by_name[desc_proto.name] - - full_message_name = [desc_proto.name] - if package: full_message_name.insert(0, package) - - # Create Descriptors for enum types - enum_types = {} - for enum_proto in desc_proto.enum_type: - full_name = '.'.join(full_message_name + [enum_proto.name]) - enum_desc = EnumDescriptor( - enum_proto.name, full_name, None, [ - EnumValueDescriptor(enum_val.name, ii, enum_val.number, - create_key=_internal_create_key) - for ii, enum_val in enumerate(enum_proto.value)], - create_key=_internal_create_key) - enum_types[full_name] = enum_desc - - # Create Descriptors for nested types - nested_types = {} - for nested_proto in desc_proto.nested_type: - full_name = '.'.join(full_message_name + [nested_proto.name]) - # Nested types are just those defined inside of the message, not all types - # used by fields in the message, so no loops are possible here. 
- nested_desc = MakeDescriptor(nested_proto, - package='.'.join(full_message_name), - build_file_if_cpp=False, - syntax=syntax) - nested_types[full_name] = nested_desc - - fields = [] - for field_proto in desc_proto.field: - full_name = '.'.join(full_message_name + [field_proto.name]) - enum_desc = None - nested_desc = None - if field_proto.json_name: - json_name = field_proto.json_name - else: - json_name = None - if field_proto.HasField('type_name'): - type_name = field_proto.type_name - full_type_name = '.'.join(full_message_name + - [type_name[type_name.rfind('.')+1:]]) - if full_type_name in nested_types: - nested_desc = nested_types[full_type_name] - elif full_type_name in enum_types: - enum_desc = enum_types[full_type_name] - # Else type_name references a non-local type, which isn't implemented - field = FieldDescriptor( - field_proto.name, full_name, field_proto.number - 1, - field_proto.number, field_proto.type, - FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), - field_proto.label, None, nested_desc, enum_desc, None, False, None, - options=_OptionsOrNone(field_proto), has_default_value=False, - json_name=json_name, create_key=_internal_create_key) - fields.append(field) - - desc_name = '.'.join(full_message_name) - return Descriptor(desc_proto.name, desc_name, None, None, fields, - list(nested_types.values()), list(enum_types.values()), [], - options=_OptionsOrNone(desc_proto), - create_key=_internal_create_key) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py deleted file mode 100644 index 073eddc711..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py +++ /dev/null @@ -1,177 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -"""Provides a container for DescriptorProtos.""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -import warnings - - -class Error(Exception): - pass - - -class DescriptorDatabaseConflictingDefinitionError(Error): - """Raised when a proto is added with the same name & different descriptor.""" - - -class DescriptorDatabase(object): - """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" - - def __init__(self): - self._file_desc_protos_by_file = {} - self._file_desc_protos_by_symbol = {} - - def Add(self, file_desc_proto): - """Adds the FileDescriptorProto and its types to this database. - - Args: - file_desc_proto: The FileDescriptorProto to add. - Raises: - DescriptorDatabaseConflictingDefinitionError: if an attempt is made to - add a proto with the same name but different definition than an - existing proto in the database. - """ - proto_name = file_desc_proto.name - if proto_name not in self._file_desc_protos_by_file: - self._file_desc_protos_by_file[proto_name] = file_desc_proto - elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: - raise DescriptorDatabaseConflictingDefinitionError( - '%s already added, but with different descriptor.' % proto_name) - else: - return - - # Add all the top-level descriptors to the index. - package = file_desc_proto.package - for message in file_desc_proto.message_type: - for name in _ExtractSymbols(message, package): - self._AddSymbol(name, file_desc_proto) - for enum in file_desc_proto.enum_type: - self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) - for enum_value in enum.value: - self._file_desc_protos_by_symbol[ - '.'.join((package, enum_value.name))] = file_desc_proto - for extension in file_desc_proto.extension: - self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) - for service in file_desc_proto.service: - self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) - - def FindFileByName(self, name): - """Finds the file descriptor proto by file name. - - Typically the file name is a relative path ending to a .proto file. The - proto with the given name will have to have been added to this database - using the Add method or else an error will be raised. - - Args: - name: The file name to find. - - Returns: - The file descriptor proto matching the name. - - Raises: - KeyError if no file by the given name was added. - """ - - return self._file_desc_protos_by_file[name] - - def FindFileContainingSymbol(self, symbol): - """Finds the file descriptor proto containing the specified symbol. - - The symbol should be a fully qualified name including the file descriptor's - package and any containing messages. Some examples: - - 'some.package.name.Message' - 'some.package.name.Message.NestedEnum' - 'some.package.name.Message.some_field' - - The file descriptor proto containing the specified symbol must be added to - this database using the Add method or else an error will be raised. - - Args: - symbol: The fully qualified symbol name. - - Returns: - The file descriptor proto containing the symbol. - - Raises: - KeyError if no file contains the specified symbol. - """ - try: - return self._file_desc_protos_by_symbol[symbol] - except KeyError: - # Fields, enum values, and nested extensions are not in - # _file_desc_protos_by_symbol. Try to find the top level - # descriptor. Non-existent nested symbol under a valid top level - # descriptor can also be found. The behavior is the same with - # protobuf C++. 
- top_level, _, _ = symbol.rpartition('.') - try: - return self._file_desc_protos_by_symbol[top_level] - except KeyError: - # Raise the original symbol as a KeyError for better diagnostics. - raise KeyError(symbol) - - def FindFileContainingExtension(self, extendee_name, extension_number): - # TODO(jieluo): implement this API. - return None - - def FindAllExtensionNumbers(self, extendee_name): - # TODO(jieluo): implement this API. - return [] - - def _AddSymbol(self, name, file_desc_proto): - if name in self._file_desc_protos_by_symbol: - warn_msg = ('Conflict register for file "' + file_desc_proto.name + - '": ' + name + - ' is already defined in file "' + - self._file_desc_protos_by_symbol[name].name + '"') - warnings.warn(warn_msg, RuntimeWarning) - self._file_desc_protos_by_symbol[name] = file_desc_proto - - -def _ExtractSymbols(desc_proto, package): - """Pulls out all the symbols from a descriptor proto. - - Args: - desc_proto: The proto to extract symbols from. - package: The package containing the descriptor type. - - Yields: - The fully qualified name found in the descriptor. - """ - message_name = package + '.' + desc_proto.name if package else desc_proto.name - yield message_name - for nested_type in desc_proto.nested_type: - for symbol in _ExtractSymbols(nested_type, message_name): - yield symbol - for enum_type in desc_proto.enum_type: - yield '.'.join((message_name, enum_type.name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py deleted file mode 100644 index f570386432..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py +++ /dev/null @@ -1,1925 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
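# --- Illustrative sketch only; not part of the deleted vendored files above. ---
# Assuming a standard google.protobuf runtime, DescriptorDatabase (deleted
# above) indexes FileDescriptorProtos by file name and by fully qualified
# symbol; 'example.proto' and the 'Thing' message are invented for this sketch.
from google.protobuf import descriptor_database, descriptor_pb2

db = descriptor_database.DescriptorDatabase()
fdp = descriptor_pb2.FileDescriptorProto(name='example.proto', package='example')
fdp.message_type.add(name='Thing')
db.Add(fdp)
assert db.FindFileByName('example.proto') is fdp
assert db.FindFileContainingSymbol('example.Thing') is fdp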
-# source: google/protobuf/descriptor.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR = _descriptor.FileDescriptor( - name='google/protobuf/descriptor.proto', - package='google.protobuf', - syntax='proto2', - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' - ) -else: - DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') - -if _descriptor._USE_C_DESCRIPTORS == False: - _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( - name='Type', - full_name='google.protobuf.FieldDescriptorProto.Type', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='TYPE_DOUBLE', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FLOAT', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_INT64', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_UINT64', index=3, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_INT32', index=4, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FIXED64', index=5, number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FIXED32', index=6, number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_BOOL', index=7, number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_STRING', index=8, number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - 
_descriptor.EnumValueDescriptor( - name='TYPE_GROUP', index=9, number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_MESSAGE', index=10, number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_BYTES', index=11, number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_UINT32', index=12, number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_ENUM', index=13, number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SFIXED32', index=14, number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SFIXED64', index=15, number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SINT32', index=16, number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SINT64', index=17, number=18, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) - - _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( - name='Label', - full_name='google.protobuf.FieldDescriptorProto.Label', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='LABEL_OPTIONAL', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LABEL_REQUIRED', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LABEL_REPEATED', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) - - _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( - name='OptimizeMode', - full_name='google.protobuf.FileOptions.OptimizeMode', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='SPEED', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CODE_SIZE', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LITE_RUNTIME', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) - - _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( - name='CType', - full_name='google.protobuf.FieldOptions.CType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - 
name='STRING', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CORD', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STRING_PIECE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) - - _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( - name='JSType', - full_name='google.protobuf.FieldOptions.JSType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='JS_NORMAL', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JS_STRING', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JS_NUMBER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) - - _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( - name='IdempotencyLevel', - full_name='google.protobuf.MethodOptions.IdempotencyLevel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='IDEMPOTENCY_UNKNOWN', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='NO_SIDE_EFFECTS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IDEMPOTENT', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) - - - _FILEDESCRIPTORSET = _descriptor.Descriptor( - name='FileDescriptorSet', - full_name='google.protobuf.FileDescriptorSet', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _FILEDESCRIPTORPROTO = _descriptor.Descriptor( - name='FileDescriptorProto', - full_name='google.protobuf.FileDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, - number=10, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, - number=11, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, - number=7, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, - number=8, type=11, 
cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, - number=12, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( - name='ExtensionRange', - full_name='google.protobuf.DescriptorProto.ExtensionRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( - name='ReservedRange', - full_name='google.protobuf.DescriptorProto.ReservedRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _DESCRIPTORPROTO = _descriptor.Descriptor( - name='DescriptorProto', - full_name='google.protobuf.DescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.DescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='field', full_name='google.protobuf.DescriptorProto.field', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, - number=8, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.DescriptorProto.options', index=7, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, - number=9, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, - number=10, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( - name='ExtensionRangeOptions', - full_name='google.protobuf.ExtensionRangeOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( - name='FieldDescriptorProto', - full_name='google.protobuf.FieldDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, - number=4, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, - number=9, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, - number=10, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, - number=17, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FIELDDESCRIPTORPROTO_TYPE, - _FIELDDESCRIPTORPROTO_LABEL, - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( - name='OneofDescriptorProto', - full_name='google.protobuf.OneofDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( - name='EnumReservedRange', - full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( - name='EnumDescriptorProto', - full_name='google.protobuf.EnumDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, - number=5, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( - name='EnumValueDescriptorProto', - full_name='google.protobuf.EnumValueDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( - name='ServiceDescriptorProto', - 
full_name='google.protobuf.ServiceDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _METHODDESCRIPTORPROTO = _descriptor.Descriptor( - name='MethodDescriptorProto', - full_name='google.protobuf.MethodDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _FILEOPTIONS = _descriptor.Descriptor( - name='FileOptions', - full_name='google.protobuf.FileOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, - number=10, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, - number=20, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, - number=27, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, - number=9, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=1, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, - number=11, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, - number=16, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, - number=17, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, - number=18, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, - number=42, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, - number=23, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, - number=31, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, - number=36, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
- number=37, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, - number=39, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, - number=40, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, - number=41, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, - number=44, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, - number=45, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FILEOPTIONS_OPTIMIZEMODE, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _MESSAGEOPTIONS = _descriptor.Descriptor( - name='MessageOptions', - full_name='google.protobuf.MessageOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _FIELDOPTIONS = _descriptor.Descriptor( - name='FieldOptions', - full_name='google.protobuf.FieldOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, - number=6, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, 
default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, - number=15, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, - number=10, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FIELDOPTIONS_CTYPE, - _FIELDOPTIONS_JSTYPE, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ONEOFOPTIONS = _descriptor.Descriptor( - name='OneofOptions', - full_name='google.protobuf.OneofOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ENUMOPTIONS = _descriptor.Descriptor( - name='EnumOptions', - full_name='google.protobuf.EnumOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( 
- name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ENUMVALUEOPTIONS = _descriptor.Descriptor( - name='EnumValueOptions', - full_name='google.protobuf.EnumValueOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _SERVICEOPTIONS = _descriptor.Descriptor( - name='ServiceOptions', - full_name='google.protobuf.ServiceOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, - number=33, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _METHODOPTIONS = _descriptor.Descriptor( - name='MethodOptions', - 
full_name='google.protobuf.MethodOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, - number=33, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, - number=34, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _METHODOPTIONS_IDEMPOTENCYLEVEL, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( - name='NamePart', - full_name='google.protobuf.UninterpretedOption.NamePart', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, - number=1, type=9, cpp_type=9, label=2, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, - number=2, type=8, cpp_type=7, label=2, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _UNINTERPRETEDOPTION = _descriptor.Descriptor( - name='UninterpretedOption', - full_name='google.protobuf.UninterpretedOption', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - 
_descriptor.FieldDescriptor( - name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, - number=5, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, - number=6, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( - name='Location', - full_name='google.protobuf.SourceCodeInfo.Location', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, - number=1, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, - number=2, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, - number=6, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _SOURCECODEINFO = _descriptor.Descriptor( - name='SourceCodeInfo', - full_name='google.protobuf.SourceCodeInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_SOURCECODEINFO_LOCATION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( - name='Annotation', - full_name='google.protobuf.GeneratedCodeInfo.Annotation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, - number=1, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _GENERATEDCODEINFO = _descriptor.Descriptor( - name='GeneratedCodeInfo', - full_name='google.protobuf.GeneratedCodeInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS - _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO - _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS - _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE - _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS - _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE - _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL - _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE - _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS - _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO - _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO - _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO - _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO - _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS - _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE - _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS - _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO - _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS - _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS - _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE - _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS - _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE - _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE - _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS - _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS - _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL - _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS - _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION - _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART - _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO - _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION - _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO - _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION - DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET - DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS - DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO - 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS - DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS - DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS - DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS - DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS - DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS - DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS - DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS - DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION - DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO - DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO - _sym_db.RegisterFileDescriptor(DESCRIPTOR) - -else: - _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _FILEDESCRIPTORSET._serialized_start=53 - _FILEDESCRIPTORSET._serialized_end=124 - _FILEDESCRIPTORPROTO._serialized_start=127 - _FILEDESCRIPTORPROTO._serialized_end=602 - _DESCRIPTORPROTO._serialized_start=605 - _DESCRIPTORPROTO._serialized_end=1286 - _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 - _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 - _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 - _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 - _EXTENSIONRANGEOPTIONS._serialized_start=1288 - _EXTENSIONRANGEOPTIONS._serialized_end=1391 - _FIELDDESCRIPTORPROTO._serialized_start=1394 - _FIELDDESCRIPTORPROTO._serialized_end=2119 - _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 - _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 - _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 - _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 - _ONEOFDESCRIPTORPROTO._serialized_start=2121 - _ONEOFDESCRIPTORPROTO._serialized_end=2205 - _ENUMDESCRIPTORPROTO._serialized_start=2208 - _ENUMDESCRIPTORPROTO._serialized_end=2500 - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 - _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 - _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 - _SERVICEDESCRIPTORPROTO._serialized_start=2613 - _SERVICEDESCRIPTORPROTO._serialized_end=2757 - _METHODDESCRIPTORPROTO._serialized_start=2760 - _METHODDESCRIPTORPROTO._serialized_end=2953 - _FILEOPTIONS._serialized_start=2956 - _FILEOPTIONS._serialized_end=3761 - _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 - _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 - _MESSAGEOPTIONS._serialized_start=3764 - _MESSAGEOPTIONS._serialized_end=4024 - _FIELDOPTIONS._serialized_start=4027 - _FIELDOPTIONS._serialized_end=4473 - _FIELDOPTIONS_CTYPE._serialized_start=4354 - _FIELDOPTIONS_CTYPE._serialized_end=4401 - _FIELDOPTIONS_JSTYPE._serialized_start=4403 - _FIELDOPTIONS_JSTYPE._serialized_end=4456 - _ONEOFOPTIONS._serialized_start=4475 - _ONEOFOPTIONS._serialized_end=4569 - _ENUMOPTIONS._serialized_start=4572 - _ENUMOPTIONS._serialized_end=4719 - _ENUMVALUEOPTIONS._serialized_start=4721 - _ENUMVALUEOPTIONS._serialized_end=4846 - _SERVICEOPTIONS._serialized_start=4848 - _SERVICEOPTIONS._serialized_end=4971 - 
_METHODOPTIONS._serialized_start=4974 - _METHODOPTIONS._serialized_end=5275 - _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 - _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 - _UNINTERPRETEDOPTION._serialized_start=5278 - _UNINTERPRETEDOPTION._serialized_end=5564 - _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 - _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 - _SOURCECODEINFO._serialized_start=5567 - _SOURCECODEINFO._serialized_end=5780 - _SOURCECODEINFO_LOCATION._serialized_start=5646 - _SOURCECODEINFO_LOCATION._serialized_end=5780 - _GENERATEDCODEINFO._serialized_start=5783 - _GENERATEDCODEINFO._serialized_end=5950 - _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 - _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py deleted file mode 100644 index 911372a8b0..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py +++ /dev/null @@ -1,1295 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides DescriptorPool to use as a container for proto2 descriptors. - -The DescriptorPool is used in conjection with a DescriptorDatabase to maintain -a collection of protocol buffer descriptors for use when dynamically creating -message types at runtime. - -For most applications protocol buffers should be used via modules generated by -the protocol buffer compiler tool. This should only be used when the type of -protocol buffers used in an application or library cannot be predetermined. - -Below is a straightforward example on how to use this class:: - - pool = DescriptorPool() - file_descriptor_protos = [ ... 
] - for file_descriptor_proto in file_descriptor_protos: - pool.Add(file_descriptor_proto) - my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') - -The message descriptor can be used in conjunction with the message_factory -module in order to create a protocol buffer class that can be encoded and -decoded. - -If you want to get a Python class for the specified proto, use the -helper functions inside google.protobuf.message_factory -directly instead of this class. -""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -import collections -import warnings - -from google.protobuf import descriptor -from google.protobuf import descriptor_database -from google.protobuf import text_encoding - - -_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access - - -def _Deprecated(func): - """Mark functions as deprecated.""" - - def NewFunc(*args, **kwargs): - warnings.warn( - 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' - 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' - 'instead.' % func.__name__, - category=DeprecationWarning) - return func(*args, **kwargs) - NewFunc.__name__ = func.__name__ - NewFunc.__doc__ = func.__doc__ - NewFunc.__dict__.update(func.__dict__) - return NewFunc - - -def _NormalizeFullyQualifiedName(name): - """Remove leading period from fully-qualified type name. - - Due to b/13860351 in descriptor_database.py, types in the root namespace are - generated with a leading period. This function removes that prefix. - - Args: - name (str): The fully-qualified symbol name. - - Returns: - str: The normalized fully-qualified symbol name. - """ - return name.lstrip('.') - - -def _OptionsOrNone(descriptor_proto): - """Returns the value of the field `options`, or None if it is not set.""" - if descriptor_proto.HasField('options'): - return descriptor_proto.options - else: - return None - - -def _IsMessageSetExtension(field): - return (field.is_extension and - field.containing_type.has_options and - field.containing_type.GetOptions().message_set_wire_format and - field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) - - -class DescriptorPool(object): - """A collection of protobufs dynamically constructed by descriptor protos.""" - - if _USE_C_DESCRIPTORS: - - def __new__(cls, descriptor_db=None): - # pylint: disable=protected-access - return descriptor._message.DescriptorPool(descriptor_db) - - def __init__(self, descriptor_db=None): - """Initializes a Pool of proto buffs. - - The descriptor_db argument to the constructor is provided to allow - specialized file descriptor proto lookup code to be triggered on demand. An - example would be an implementation which will read and compile a file - specified in a call to FindFileByName() and not require the call to Add() - at all. Results from this database will be cached internally here as well. - - Args: - descriptor_db: A secondary source of file descriptors. - """ - - self._internal_db = descriptor_database.DescriptorDatabase() - self._descriptor_db = descriptor_db - self._descriptors = {} - self._enum_descriptors = {} - self._service_descriptors = {} - self._file_descriptors = {} - self._toplevel_extensions = {} - # TODO(jieluo): Remove _file_desc_by_toplevel_extension after - # maybe year 2020 for compatibility issue (with 3.4.1 only). 
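(Editor's sketch, not part of the diff.) The module docstring of the deleted descriptor_pool.py above describes the intended workflow: add FileDescriptorProto messages to a DescriptorPool, look descriptors up by fully-qualified name, and pair them with google.protobuf.message_factory to get usable message classes. Since only this vendored copy is removed, the same API remains available from the upstream protobuf distribution. The sketch below assumes the standalone protobuf package is installed; the file name, package, and message names (ping.proto, my.pkg, Ping, payload) are illustrative only.

# Illustrative only: the DescriptorPool workflow described in the removed docstring,
# using the upstream google.protobuf package rather than the vendored copy.
from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

pool = descriptor_pool.DescriptorPool()

# Build a FileDescriptorProto by hand and register it with the pool.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = "ping.proto"
file_proto.package = "my.pkg"
msg_proto = file_proto.message_type.add()
msg_proto.name = "Ping"
field_proto = msg_proto.field.add()
field_proto.name = "payload"
field_proto.number = 1
field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

pool.Add(file_proto)  # pool.AddSerializedFile(file_proto.SerializeToString()) also works

# Look up the descriptor and turn it into a message class via message_factory.
ping_desc = pool.FindMessageTypeByName("my.pkg.Ping")
try:
    ping_cls = message_factory.GetMessageClass(ping_desc)      # newer protobuf releases
except AttributeError:
    ping_cls = message_factory.MessageFactory(pool).GetPrototype(ping_desc)  # older releases
print(ping_cls(payload="hello").SerializeToString())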
- self._file_desc_by_toplevel_extension = {} - self._top_enum_values = {} - # We store extensions in two two-level mappings: The first key is the - # descriptor of the message being extended, the second key is the extension - # full name or its tag number. - self._extensions_by_name = collections.defaultdict(dict) - self._extensions_by_number = collections.defaultdict(dict) - - def _CheckConflictRegister(self, desc, desc_name, file_name): - """Check if the descriptor name conflicts with another of the same name. - - Args: - desc: Descriptor of a message, enum, service, extension or enum value. - desc_name (str): the full name of desc. - file_name (str): The file name of descriptor. - """ - for register, descriptor_type in [ - (self._descriptors, descriptor.Descriptor), - (self._enum_descriptors, descriptor.EnumDescriptor), - (self._service_descriptors, descriptor.ServiceDescriptor), - (self._toplevel_extensions, descriptor.FieldDescriptor), - (self._top_enum_values, descriptor.EnumValueDescriptor)]: - if desc_name in register: - old_desc = register[desc_name] - if isinstance(old_desc, descriptor.EnumValueDescriptor): - old_file = old_desc.type.file.name - else: - old_file = old_desc.file.name - - if not isinstance(desc, descriptor_type) or ( - old_file != file_name): - error_msg = ('Conflict register for file "' + file_name + - '": ' + desc_name + - ' is already defined in file "' + - old_file + '". Please fix the conflict by adding ' - 'package name on the proto file, or use different ' - 'name for the duplication.') - if isinstance(desc, descriptor.EnumValueDescriptor): - error_msg += ('\nNote: enum values appear as ' - 'siblings of the enum type instead of ' - 'children of it.') - - raise TypeError(error_msg) - - return - - def Add(self, file_desc_proto): - """Adds the FileDescriptorProto and its types to this pool. - - Args: - file_desc_proto (FileDescriptorProto): The file descriptor to add. - """ - - self._internal_db.Add(file_desc_proto) - - def AddSerializedFile(self, serialized_file_desc_proto): - """Adds the FileDescriptorProto and its types to this pool. - - Args: - serialized_file_desc_proto (bytes): A bytes string, serialization of the - :class:`FileDescriptorProto` to add. - - Returns: - FileDescriptor: Descriptor for the added file. - """ - - # pylint: disable=g-import-not-at-top - from google.protobuf import descriptor_pb2 - file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( - serialized_file_desc_proto) - file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) - file_desc.serialized_pb = serialized_file_desc_proto - return file_desc - - # Add Descriptor to descriptor pool is dreprecated. Please use Add() - # or AddSerializedFile() to add a FileDescriptorProto instead. - @_Deprecated - def AddDescriptor(self, desc): - self._AddDescriptor(desc) - - # Never call this method. It is for internal usage only. - def _AddDescriptor(self, desc): - """Adds a Descriptor to the pool, non-recursively. - - If the Descriptor contains nested messages or enums, the caller must - explicitly register them. This method also registers the FileDescriptor - associated with the message. - - Args: - desc: A Descriptor. - """ - if not isinstance(desc, descriptor.Descriptor): - raise TypeError('Expected instance of descriptor.Descriptor.') - - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - - self._descriptors[desc.full_name] = desc - self._AddFileDescriptor(desc.file) - - # Add EnumDescriptor to descriptor pool is dreprecated. 
Please use Add() - # or AddSerializedFile() to add a FileDescriptorProto instead. - @_Deprecated - def AddEnumDescriptor(self, enum_desc): - self._AddEnumDescriptor(enum_desc) - - # Never call this method. It is for internal usage only. - def _AddEnumDescriptor(self, enum_desc): - """Adds an EnumDescriptor to the pool. - - This method also registers the FileDescriptor associated with the enum. - - Args: - enum_desc: An EnumDescriptor. - """ - - if not isinstance(enum_desc, descriptor.EnumDescriptor): - raise TypeError('Expected instance of descriptor.EnumDescriptor.') - - file_name = enum_desc.file.name - self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) - self._enum_descriptors[enum_desc.full_name] = enum_desc - - # Top enum values need to be indexed. - # Count the number of dots to see whether the enum is toplevel or nested - # in a message. We cannot use enum_desc.containing_type at this stage. - if enum_desc.file.package: - top_level = (enum_desc.full_name.count('.') - - enum_desc.file.package.count('.') == 1) - else: - top_level = enum_desc.full_name.count('.') == 0 - if top_level: - file_name = enum_desc.file.name - package = enum_desc.file.package - for enum_value in enum_desc.values: - full_name = _NormalizeFullyQualifiedName( - '.'.join((package, enum_value.name))) - self._CheckConflictRegister(enum_value, full_name, file_name) - self._top_enum_values[full_name] = enum_value - self._AddFileDescriptor(enum_desc.file) - - # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add() - # or AddSerializedFile() to add a FileDescriptorProto instead. - @_Deprecated - def AddServiceDescriptor(self, service_desc): - self._AddServiceDescriptor(service_desc) - - # Never call this method. It is for internal usage only. - def _AddServiceDescriptor(self, service_desc): - """Adds a ServiceDescriptor to the pool. - - Args: - service_desc: A ServiceDescriptor. - """ - - if not isinstance(service_desc, descriptor.ServiceDescriptor): - raise TypeError('Expected instance of descriptor.ServiceDescriptor.') - - self._CheckConflictRegister(service_desc, service_desc.full_name, - service_desc.file.name) - self._service_descriptors[service_desc.full_name] = service_desc - - # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add() - # or AddSerializedFile() to add a FileDescriptorProto instead. - @_Deprecated - def AddExtensionDescriptor(self, extension): - self._AddExtensionDescriptor(extension) - - # Never call this method. It is for internal usage only. - def _AddExtensionDescriptor(self, extension): - """Adds a FieldDescriptor describing an extension to the pool. - - Args: - extension: A FieldDescriptor. - - Raises: - AssertionError: when another extension with the same number extends the - same message. - TypeError: when the specified extension is not a - descriptor.FieldDescriptor. - """ - if not (isinstance(extension, descriptor.FieldDescriptor) and - extension.is_extension): - raise TypeError('Expected an extension descriptor.') - - if extension.extension_scope is None: - self._toplevel_extensions[extension.full_name] = extension - - try: - existing_desc = self._extensions_by_number[ - extension.containing_type][extension.number] - except KeyError: - pass - else: - if extension is not existing_desc: - raise AssertionError( - 'Extensions "%s" and "%s" both try to extend message type "%s" ' - 'with field number %d.' 
% - (extension.full_name, existing_desc.full_name, - extension.containing_type.full_name, extension.number)) - - self._extensions_by_number[extension.containing_type][ - extension.number] = extension - self._extensions_by_name[extension.containing_type][ - extension.full_name] = extension - - # Also register MessageSet extensions with the type name. - if _IsMessageSetExtension(extension): - self._extensions_by_name[extension.containing_type][ - extension.message_type.full_name] = extension - - @_Deprecated - def AddFileDescriptor(self, file_desc): - self._InternalAddFileDescriptor(file_desc) - - # Never call this method. It is for internal usage only. - def _InternalAddFileDescriptor(self, file_desc): - """Adds a FileDescriptor to the pool, non-recursively. - - If the FileDescriptor contains messages or enums, the caller must explicitly - register them. - - Args: - file_desc: A FileDescriptor. - """ - - self._AddFileDescriptor(file_desc) - # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. - # FieldDescriptor.file is added in code gen. Remove this solution after - # maybe 2020 for compatibility reason (with 3.4.1 only). - for extension in file_desc.extensions_by_name.values(): - self._file_desc_by_toplevel_extension[ - extension.full_name] = file_desc - - def _AddFileDescriptor(self, file_desc): - """Adds a FileDescriptor to the pool, non-recursively. - - If the FileDescriptor contains messages or enums, the caller must explicitly - register them. - - Args: - file_desc: A FileDescriptor. - """ - - if not isinstance(file_desc, descriptor.FileDescriptor): - raise TypeError('Expected instance of descriptor.FileDescriptor.') - self._file_descriptors[file_desc.name] = file_desc - - def FindFileByName(self, file_name): - """Gets a FileDescriptor by file name. - - Args: - file_name (str): The path to the file to get a descriptor for. - - Returns: - FileDescriptor: The descriptor for the named file. - - Raises: - KeyError: if the file cannot be found in the pool. - """ - - try: - return self._file_descriptors[file_name] - except KeyError: - pass - - try: - file_proto = self._internal_db.FindFileByName(file_name) - except KeyError as error: - if self._descriptor_db: - file_proto = self._descriptor_db.FindFileByName(file_name) - else: - raise error - if not file_proto: - raise KeyError('Cannot find a file named %s' % file_name) - return self._ConvertFileProtoToFileDescriptor(file_proto) - - def FindFileContainingSymbol(self, symbol): - """Gets the FileDescriptor for the file containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: Descriptor for the file that contains the specified - symbol. - - Raises: - KeyError: if the file cannot be found in the pool. - """ - - symbol = _NormalizeFullyQualifiedName(symbol) - try: - return self._InternalFindFileContainingSymbol(symbol) - except KeyError: - pass - - try: - # Try fallback database. Build and find again if possible. - self._FindFileContainingSymbolInDb(symbol) - return self._InternalFindFileContainingSymbol(symbol) - except KeyError: - raise KeyError('Cannot find a file containing %s' % symbol) - - def _InternalFindFileContainingSymbol(self, symbol): - """Gets the already built FileDescriptor containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: Descriptor for the file that contains the specified - symbol. - - Raises: - KeyError: if the file cannot be found in the pool. 
- """ - try: - return self._descriptors[symbol].file - except KeyError: - pass - - try: - return self._enum_descriptors[symbol].file - except KeyError: - pass - - try: - return self._service_descriptors[symbol].file - except KeyError: - pass - - try: - return self._top_enum_values[symbol].type.file - except KeyError: - pass - - try: - return self._file_desc_by_toplevel_extension[symbol] - except KeyError: - pass - - # Try fields, enum values and nested extensions inside a message. - top_name, _, sub_name = symbol.rpartition('.') - try: - message = self.FindMessageTypeByName(top_name) - assert (sub_name in message.extensions_by_name or - sub_name in message.fields_by_name or - sub_name in message.enum_values_by_name) - return message.file - except (KeyError, AssertionError): - raise KeyError('Cannot find a file containing %s' % symbol) - - def FindMessageTypeByName(self, full_name): - """Loads the named descriptor from the pool. - - Args: - full_name (str): The full name of the descriptor to load. - - Returns: - Descriptor: The descriptor for the named type. - - Raises: - KeyError: if the message cannot be found in the pool. - """ - - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._descriptors[full_name] - - def FindEnumTypeByName(self, full_name): - """Loads the named enum descriptor from the pool. - - Args: - full_name (str): The full name of the enum descriptor to load. - - Returns: - EnumDescriptor: The enum descriptor for the named type. - - Raises: - KeyError: if the enum cannot be found in the pool. - """ - - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._enum_descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._enum_descriptors[full_name] - - def FindFieldByName(self, full_name): - """Loads the named field descriptor from the pool. - - Args: - full_name (str): The full name of the field descriptor to load. - - Returns: - FieldDescriptor: The field descriptor for the named field. - - Raises: - KeyError: if the field cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - message_name, _, field_name = full_name.rpartition('.') - message_descriptor = self.FindMessageTypeByName(message_name) - return message_descriptor.fields_by_name[field_name] - - def FindOneofByName(self, full_name): - """Loads the named oneof descriptor from the pool. - - Args: - full_name (str): The full name of the oneof descriptor to load. - - Returns: - OneofDescriptor: The oneof descriptor for the named oneof. - - Raises: - KeyError: if the oneof cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - message_name, _, oneof_name = full_name.rpartition('.') - message_descriptor = self.FindMessageTypeByName(message_name) - return message_descriptor.oneofs_by_name[oneof_name] - - def FindExtensionByName(self, full_name): - """Loads the named extension descriptor from the pool. - - Args: - full_name (str): The full name of the extension descriptor to load. - - Returns: - FieldDescriptor: The field descriptor for the named extension. - - Raises: - KeyError: if the extension cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - try: - # The proto compiler does not give any link between the FileDescriptor - # and top-level extensions unless the FileDescriptorProto is added to - # the DescriptorDatabase, but this can impact memory usage. 
- # So we registered these extensions by name explicitly. - return self._toplevel_extensions[full_name] - except KeyError: - pass - message_name, _, extension_name = full_name.rpartition('.') - try: - # Most extensions are nested inside a message. - scope = self.FindMessageTypeByName(message_name) - except KeyError: - # Some extensions are defined at file scope. - scope = self._FindFileContainingSymbolInDb(full_name) - return scope.extensions_by_name[extension_name] - - def FindExtensionByNumber(self, message_descriptor, number): - """Gets the extension of the specified message with the specified number. - - Extensions have to be registered to this pool by calling :func:`Add` or - :func:`AddExtensionDescriptor`. - - Args: - message_descriptor (Descriptor): descriptor of the extended message. - number (int): Number of the extension field. - - Returns: - FieldDescriptor: The descriptor for the extension. - - Raises: - KeyError: when no extension with the given number is known for the - specified message. - """ - try: - return self._extensions_by_number[message_descriptor][number] - except KeyError: - self._TryLoadExtensionFromDB(message_descriptor, number) - return self._extensions_by_number[message_descriptor][number] - - def FindAllExtensions(self, message_descriptor): - """Gets all the known extensions of a given message. - - Extensions have to be registered to this pool by build related - :func:`Add` or :func:`AddExtensionDescriptor`. - - Args: - message_descriptor (Descriptor): Descriptor of the extended message. - - Returns: - list[FieldDescriptor]: Field descriptors describing the extensions. - """ - # Fallback to descriptor db if FindAllExtensionNumbers is provided. - if self._descriptor_db and hasattr( - self._descriptor_db, 'FindAllExtensionNumbers'): - full_name = message_descriptor.full_name - all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) - for number in all_numbers: - if number in self._extensions_by_number[message_descriptor]: - continue - self._TryLoadExtensionFromDB(message_descriptor, number) - - return list(self._extensions_by_number[message_descriptor].values()) - - def _TryLoadExtensionFromDB(self, message_descriptor, number): - """Try to Load extensions from descriptor db. - - Args: - message_descriptor: descriptor of the extended message. - number: the extension number that needs to be loaded. - """ - if not self._descriptor_db: - return - # Only supported when FindFileContainingExtension is provided. - if not hasattr( - self._descriptor_db, 'FindFileContainingExtension'): - return - - full_name = message_descriptor.full_name - file_proto = self._descriptor_db.FindFileContainingExtension( - full_name, number) - - if file_proto is None: - return - - try: - self._ConvertFileProtoToFileDescriptor(file_proto) - except: - warn_msg = ('Unable to load proto file %s for extension number %d.' % - (file_proto.name, number)) - warnings.warn(warn_msg, RuntimeWarning) - - def FindServiceByName(self, full_name): - """Loads the named service descriptor from the pool. - - Args: - full_name (str): The full name of the service descriptor to load. - - Returns: - ServiceDescriptor: The service descriptor for the named service. - - Raises: - KeyError: if the service cannot be found in the pool. 
- """ - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._service_descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._service_descriptors[full_name] - - def FindMethodByName(self, full_name): - """Loads the named service method descriptor from the pool. - - Args: - full_name (str): The full name of the method descriptor to load. - - Returns: - MethodDescriptor: The method descriptor for the service method. - - Raises: - KeyError: if the method cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - service_name, _, method_name = full_name.rpartition('.') - service_descriptor = self.FindServiceByName(service_name) - return service_descriptor.methods_by_name[method_name] - - def _FindFileContainingSymbolInDb(self, symbol): - """Finds the file in descriptor DB containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: The file that contains the specified symbol. - - Raises: - KeyError: if the file cannot be found in the descriptor database. - """ - try: - file_proto = self._internal_db.FindFileContainingSymbol(symbol) - except KeyError as error: - if self._descriptor_db: - file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) - else: - raise error - if not file_proto: - raise KeyError('Cannot find a file containing %s' % symbol) - return self._ConvertFileProtoToFileDescriptor(file_proto) - - def _ConvertFileProtoToFileDescriptor(self, file_proto): - """Creates a FileDescriptor from a proto or returns a cached copy. - - This method also has the side effect of loading all the symbols found in - the file into the appropriate dictionaries in the pool. - - Args: - file_proto: The proto to convert. - - Returns: - A FileDescriptor matching the passed in proto. - """ - if file_proto.name not in self._file_descriptors: - built_deps = list(self._GetDeps(file_proto.dependency)) - direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] - public_deps = [direct_deps[i] for i in file_proto.public_dependency] - - file_descriptor = descriptor.FileDescriptor( - pool=self, - name=file_proto.name, - package=file_proto.package, - syntax=file_proto.syntax, - options=_OptionsOrNone(file_proto), - serialized_pb=file_proto.SerializeToString(), - dependencies=direct_deps, - public_dependencies=public_deps, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - scope = {} - - # This loop extracts all the message and enum types from all the - # dependencies of the file_proto. This is necessary to create the - # scope of available message types when defining the passed in - # file proto. 
- for dependency in built_deps: - scope.update(self._ExtractSymbols( - dependency.message_types_by_name.values())) - scope.update((_PrefixWithDot(enum.full_name), enum) - for enum in dependency.enum_types_by_name.values()) - - for message_type in file_proto.message_type: - message_desc = self._ConvertMessageDescriptor( - message_type, file_proto.package, file_descriptor, scope, - file_proto.syntax) - file_descriptor.message_types_by_name[message_desc.name] = ( - message_desc) - - for enum_type in file_proto.enum_type: - file_descriptor.enum_types_by_name[enum_type.name] = ( - self._ConvertEnumDescriptor(enum_type, file_proto.package, - file_descriptor, None, scope, True)) - - for index, extension_proto in enumerate(file_proto.extension): - extension_desc = self._MakeFieldDescriptor( - extension_proto, file_proto.package, index, file_descriptor, - is_extension=True) - extension_desc.containing_type = self._GetTypeFromScope( - file_descriptor.package, extension_proto.extendee, scope) - self._SetFieldType(extension_proto, extension_desc, - file_descriptor.package, scope) - file_descriptor.extensions_by_name[extension_desc.name] = ( - extension_desc) - self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( - file_descriptor) - - for desc_proto in file_proto.message_type: - self._SetAllFieldTypes(file_proto.package, desc_proto, scope) - - if file_proto.package: - desc_proto_prefix = _PrefixWithDot(file_proto.package) - else: - desc_proto_prefix = '' - - for desc_proto in file_proto.message_type: - desc = self._GetTypeFromScope( - desc_proto_prefix, desc_proto.name, scope) - file_descriptor.message_types_by_name[desc_proto.name] = desc - - for index, service_proto in enumerate(file_proto.service): - file_descriptor.services_by_name[service_proto.name] = ( - self._MakeServiceDescriptor(service_proto, index, scope, - file_proto.package, file_descriptor)) - - self._file_descriptors[file_proto.name] = file_descriptor - - # Add extensions to the pool - file_desc = self._file_descriptors[file_proto.name] - for extension in file_desc.extensions_by_name.values(): - self._AddExtensionDescriptor(extension) - for message_type in file_desc.message_types_by_name.values(): - for extension in message_type.extensions: - self._AddExtensionDescriptor(extension) - - return file_desc - - def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, - scope=None, syntax=None): - """Adds the proto to the pool in the specified package. - - Args: - desc_proto: The descriptor_pb2.DescriptorProto protobuf message. - package: The package the proto should be located in. - file_desc: The file containing this message. - scope: Dict mapping short and full symbols to message and enum types. - syntax: string indicating syntax of the file ("proto2" or "proto3") - - Returns: - The added descriptor. 
- """ - - if package: - desc_name = '.'.join((package, desc_proto.name)) - else: - desc_name = desc_proto.name - - if file_desc is None: - file_name = None - else: - file_name = file_desc.name - - if scope is None: - scope = {} - - nested = [ - self._ConvertMessageDescriptor( - nested, desc_name, file_desc, scope, syntax) - for nested in desc_proto.nested_type] - enums = [ - self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, - scope, False) - for enum in desc_proto.enum_type] - fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) - for index, field in enumerate(desc_proto.field)] - extensions = [ - self._MakeFieldDescriptor(extension, desc_name, index, file_desc, - is_extension=True) - for index, extension in enumerate(desc_proto.extension)] - oneofs = [ - # pylint: disable=g-complex-comprehension - descriptor.OneofDescriptor( - desc.name, - '.'.join((desc_name, desc.name)), - index, - None, - [], - _OptionsOrNone(desc), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - for index, desc in enumerate(desc_proto.oneof_decl) - ] - extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] - if extension_ranges: - is_extendable = True - else: - is_extendable = False - desc = descriptor.Descriptor( - name=desc_proto.name, - full_name=desc_name, - filename=file_name, - containing_type=None, - fields=fields, - oneofs=oneofs, - nested_types=nested, - enum_types=enums, - extensions=extensions, - options=_OptionsOrNone(desc_proto), - is_extendable=is_extendable, - extension_ranges=extension_ranges, - file=file_desc, - serialized_start=None, - serialized_end=None, - syntax=syntax, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - for nested in desc.nested_types: - nested.containing_type = desc - for enum in desc.enum_types: - enum.containing_type = desc - for field_index, field_desc in enumerate(desc_proto.field): - if field_desc.HasField('oneof_index'): - oneof_index = field_desc.oneof_index - oneofs[oneof_index].fields.append(fields[field_index]) - fields[field_index].containing_oneof = oneofs[oneof_index] - - scope[_PrefixWithDot(desc_name)] = desc - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._descriptors[desc_name] = desc - return desc - - def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, - containing_type=None, scope=None, top_level=False): - """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. - - Args: - enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. - package: Optional package name for the new message EnumDescriptor. - file_desc: The file containing the enum descriptor. - containing_type: The type containing this enum. - scope: Scope containing available types. - top_level: If True, the enum is a top level symbol. If False, the enum - is defined inside a message. 
- - Returns: - The added descriptor - """ - - if package: - enum_name = '.'.join((package, enum_proto.name)) - else: - enum_name = enum_proto.name - - if file_desc is None: - file_name = None - else: - file_name = file_desc.name - - values = [self._MakeEnumValueDescriptor(value, index) - for index, value in enumerate(enum_proto.value)] - desc = descriptor.EnumDescriptor(name=enum_proto.name, - full_name=enum_name, - filename=file_name, - file=file_desc, - values=values, - containing_type=containing_type, - options=_OptionsOrNone(enum_proto), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - scope['.%s' % enum_name] = desc - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._enum_descriptors[enum_name] = desc - - # Add top level enum values. - if top_level: - for value in values: - full_name = _NormalizeFullyQualifiedName( - '.'.join((package, value.name))) - self._CheckConflictRegister(value, full_name, file_name) - self._top_enum_values[full_name] = value - - return desc - - def _MakeFieldDescriptor(self, field_proto, message_name, index, - file_desc, is_extension=False): - """Creates a field descriptor from a FieldDescriptorProto. - - For message and enum type fields, this method will do a look up - in the pool for the appropriate descriptor for that type. If it - is unavailable, it will fall back to the _source function to - create it. If this type is still unavailable, construction will - fail. - - Args: - field_proto: The proto describing the field. - message_name: The name of the containing message. - index: Index of the field - file_desc: The file containing the field descriptor. - is_extension: Indication that this field is for an extension. - - Returns: - An initialized FieldDescriptor object - """ - - if message_name: - full_name = '.'.join((message_name, field_proto.name)) - else: - full_name = field_proto.name - - if field_proto.json_name: - json_name = field_proto.json_name - else: - json_name = None - - return descriptor.FieldDescriptor( - name=field_proto.name, - full_name=full_name, - index=index, - number=field_proto.number, - type=field_proto.type, - cpp_type=None, - message_type=None, - enum_type=None, - containing_type=None, - label=field_proto.label, - has_default_value=False, - default_value=None, - is_extension=is_extension, - extension_scope=None, - options=_OptionsOrNone(field_proto), - json_name=json_name, - file=file_desc, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _SetAllFieldTypes(self, package, desc_proto, scope): - """Sets all the descriptor's fields's types. - - This method also sets the containing types on any extensions. - - Args: - package: The current package of desc_proto. - desc_proto: The message descriptor to update. - scope: Enclosing scope of available types. 
- """ - - package = _PrefixWithDot(package) - - main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) - - if package == '.': - nested_package = _PrefixWithDot(desc_proto.name) - else: - nested_package = '.'.join([package, desc_proto.name]) - - for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): - self._SetFieldType(field_proto, field_desc, nested_package, scope) - - for extension_proto, extension_desc in ( - zip(desc_proto.extension, main_desc.extensions)): - extension_desc.containing_type = self._GetTypeFromScope( - nested_package, extension_proto.extendee, scope) - self._SetFieldType(extension_proto, extension_desc, nested_package, scope) - - for nested_type in desc_proto.nested_type: - self._SetAllFieldTypes(nested_package, nested_type, scope) - - def _SetFieldType(self, field_proto, field_desc, package, scope): - """Sets the field's type, cpp_type, message_type and enum_type. - - Args: - field_proto: Data about the field in proto format. - field_desc: The descriptor to modify. - package: The package the field's container is in. - scope: Enclosing scope of available types. - """ - if field_proto.type_name: - desc = self._GetTypeFromScope(package, field_proto.type_name, scope) - else: - desc = None - - if not field_proto.HasField('type'): - if isinstance(desc, descriptor.Descriptor): - field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE - else: - field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM - - field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( - field_proto.type) - - if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE - or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): - field_desc.message_type = desc - - if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.enum_type = desc - - if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: - field_desc.has_default_value = False - field_desc.default_value = [] - elif field_proto.HasField('default_value'): - field_desc.has_default_value = True - if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or - field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): - field_desc.default_value = float(field_proto.default_value) - elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: - field_desc.default_value = field_proto.default_value - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: - field_desc.default_value = field_proto.default_value.lower() == 'true' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.default_value = field_desc.enum_type.values_by_name[ - field_proto.default_value].number - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: - field_desc.default_value = text_encoding.CUnescape( - field_proto.default_value) - elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: - field_desc.default_value = None - else: - # All other types are of the "int" type. 
- field_desc.default_value = int(field_proto.default_value) - else: - field_desc.has_default_value = False - if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or - field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): - field_desc.default_value = 0.0 - elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: - field_desc.default_value = u'' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: - field_desc.default_value = False - elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.default_value = field_desc.enum_type.values[0].number - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: - field_desc.default_value = b'' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: - field_desc.default_value = None - elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: - field_desc.default_value = None - else: - # All other types are of the "int" type. - field_desc.default_value = 0 - - field_desc.type = field_proto.type - - def _MakeEnumValueDescriptor(self, value_proto, index): - """Creates a enum value descriptor object from a enum value proto. - - Args: - value_proto: The proto describing the enum value. - index: The index of the enum value. - - Returns: - An initialized EnumValueDescriptor object. - """ - - return descriptor.EnumValueDescriptor( - name=value_proto.name, - index=index, - number=value_proto.number, - options=_OptionsOrNone(value_proto), - type=None, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _MakeServiceDescriptor(self, service_proto, service_index, scope, - package, file_desc): - """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. - - Args: - service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. - service_index: The index of the service in the File. - scope: Dict mapping short and full symbols to message and enum types. - package: Optional package name for the new message EnumDescriptor. - file_desc: The file containing the service descriptor. - - Returns: - The added descriptor. - """ - - if package: - service_name = '.'.join((package, service_proto.name)) - else: - service_name = service_proto.name - - methods = [self._MakeMethodDescriptor(method_proto, service_name, package, - scope, index) - for index, method_proto in enumerate(service_proto.method)] - desc = descriptor.ServiceDescriptor( - name=service_proto.name, - full_name=service_name, - index=service_index, - methods=methods, - options=_OptionsOrNone(service_proto), - file=file_desc, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._service_descriptors[service_name] = desc - return desc - - def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, - index): - """Creates a method descriptor from a MethodDescriptorProto. - - Args: - method_proto: The proto describing the method. - service_name: The name of the containing service. - package: Optional package name to look up for types. - scope: Scope containing available types. - index: Index of the method in the service. - - Returns: - An initialized MethodDescriptor object. 
- """ - full_name = '.'.join((service_name, method_proto.name)) - input_type = self._GetTypeFromScope( - package, method_proto.input_type, scope) - output_type = self._GetTypeFromScope( - package, method_proto.output_type, scope) - return descriptor.MethodDescriptor( - name=method_proto.name, - full_name=full_name, - index=index, - containing_service=None, - input_type=input_type, - output_type=output_type, - client_streaming=method_proto.client_streaming, - server_streaming=method_proto.server_streaming, - options=_OptionsOrNone(method_proto), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _ExtractSymbols(self, descriptors): - """Pulls out all the symbols from descriptor protos. - - Args: - descriptors: The messages to extract descriptors from. - Yields: - A two element tuple of the type name and descriptor object. - """ - - for desc in descriptors: - yield (_PrefixWithDot(desc.full_name), desc) - for symbol in self._ExtractSymbols(desc.nested_types): - yield symbol - for enum in desc.enum_types: - yield (_PrefixWithDot(enum.full_name), enum) - - def _GetDeps(self, dependencies, visited=None): - """Recursively finds dependencies for file protos. - - Args: - dependencies: The names of the files being depended on. - visited: The names of files already found. - - Yields: - Each direct and indirect dependency. - """ - - visited = visited or set() - for dependency in dependencies: - if dependency not in visited: - visited.add(dependency) - dep_desc = self.FindFileByName(dependency) - yield dep_desc - public_files = [d.name for d in dep_desc.public_dependencies] - yield from self._GetDeps(public_files, visited) - - def _GetTypeFromScope(self, package, type_name, scope): - """Finds a given type name in the current scope. - - Args: - package: The package the proto should be located in. - type_name: The name of the type to be found in the scope. - scope: Dict mapping short and full symbols to message and enum types. - - Returns: - The descriptor for the requested type. - """ - if type_name not in scope: - components = _PrefixWithDot(package).split('.') - while components: - possible_match = '.'.join(components + [type_name]) - if possible_match in scope: - type_name = possible_match - break - else: - components.pop(-1) - return scope[type_name] - - -def _PrefixWithDot(name): - return name if name.startswith('.') else '.%s' % name - - -if _USE_C_DESCRIPTORS: - # TODO(amauryfa): This pool could be constructed from Python code, when we - # support a flag like 'use_cpp_generated_pool=True'. - # pylint: disable=protected-access - _DEFAULT = descriptor._message.default_pool -else: - _DEFAULT = DescriptorPool() - - -def Default(): - return _DEFAULT diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py deleted file mode 100644 index a8ecc07bdf..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/duration.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _DURATION._serialized_start=51 - _DURATION._serialized_end=93 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py deleted file mode 100644 index 0b4d554db3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/empty.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _EMPTY._serialized_start=48 - _EMPTY._serialized_end=55 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py deleted file mode 100644 index 80a4e96e59..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/field_mask.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _FIELDMASK._serialized_start=53 - _FIELDMASK._serialized_end=79 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py deleted file mode 100644 index afdbb78c36..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py +++ /dev/null @@ -1,443 +0,0 @@ -#! /usr/bin/env python -# -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Adds support for parameterized tests to Python's unittest TestCase class. - -A parameterized test is a method in a test case that is invoked with different -argument tuples. - -A simple example: - - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - (1, 2, 3), - (4, 5, 9), - (1, 1, 3)) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) - - -Each invocation is a separate test case and properly isolated just -like a normal test method, with its own setUp/tearDown cycle. In the -example above, there are three separate testcases, one of which will -fail due to an assertion error (1 + 1 != 3). - -Parameters for individual test cases can be tuples (with positional parameters) -or dictionaries (with named parameters): - - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - {'op1': 1, 'op2': 2, 'result': 3}, - {'op1': 4, 'op2': 5, 'result': 9}, - ) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) - -If a parameterized test fails, the error message will show the -original test name (which is modified internally) and the arguments -for the specific invocation, which are part of the string returned by -the shortDescription() method on test cases. - -The id method of the test, used internally by the unittest framework, -is also modified to show the arguments. To make sure that test names -stay the same across several invocations, object representations like - - >>> class Foo(object): - ... pass - >>> repr(Foo()) - '<__main__.Foo object at 0x23d8610>' - -are turned into '<__main__.Foo>'. For even more descriptive names, -especially in test logs, you can use the named_parameters decorator. In -this case, only tuples are supported, and the first parameters has to -be a string (or an object that returns an apt name when converted via -str()): - - class NamedExample(parameterized.TestCase): - @parameterized.named_parameters( - ('Normal', 'aa', 'aaa', True), - ('EmptyPrefix', '', 'abc', True), - ('BothEmpty', '', '', True)) - def testStartsWith(self, prefix, string, result): - self.assertEqual(result, strings.startswith(prefix)) - -Named tests also have the benefit that they can be run individually -from the command line: - - $ testmodule.py NamedExample.testStartsWithNormal - . 
- -------------------------------------------------------------------- - Ran 1 test in 0.000s - - OK - -Parameterized Classes -===================== -If invocation arguments are shared across test methods in a single -TestCase class, instead of decorating all test methods -individually, the class itself can be decorated: - - @parameterized.parameters( - (1, 2, 3) - (4, 5, 9)) - class ArithmeticTest(parameterized.TestCase): - def testAdd(self, arg1, arg2, result): - self.assertEqual(arg1 + arg2, result) - - def testSubtract(self, arg2, arg2, result): - self.assertEqual(result - arg1, arg2) - -Inputs from Iterables -===================== -If parameters should be shared across several test cases, or are dynamically -created from other sources, a single non-tuple iterable can be passed into -the decorator. This iterable will be used to obtain the test cases: - - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - c.op1, c.op2, c.result for c in testcases - ) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) - - -Single-Argument Test Methods -============================ -If a test method takes only one argument, the single argument does not need to -be wrapped into a tuple: - - class NegativeNumberExample(parameterized.TestCase): - @parameterized.parameters( - -1, -3, -4, -5 - ) - def testIsNegative(self, arg): - self.assertTrue(IsNegative(arg)) -""" - -__author__ = 'tmarek@google.com (Torsten Marek)' - -import functools -import re -import types -import unittest -import uuid - -try: - # Since python 3 - import collections.abc as collections_abc -except ImportError: - # Won't work after python 3.8 - import collections as collections_abc - -ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') -_SEPARATOR = uuid.uuid1().hex -_FIRST_ARG = object() -_ARGUMENT_REPR = object() - - -def _CleanRepr(obj): - return ADDR_RE.sub(r'<\1>', repr(obj)) - - -# Helper function formerly from the unittest module, removed from it in -# Python 2.7. -def _StrClass(cls): - return '%s.%s' % (cls.__module__, cls.__name__) - - -def _NonStringIterable(obj): - return (isinstance(obj, collections_abc.Iterable) and - not isinstance(obj, str)) - - -def _FormatParameterList(testcase_params): - if isinstance(testcase_params, collections_abc.Mapping): - return ', '.join('%s=%s' % (argname, _CleanRepr(value)) - for argname, value in testcase_params.items()) - elif _NonStringIterable(testcase_params): - return ', '.join(map(_CleanRepr, testcase_params)) - else: - return _FormatParameterList((testcase_params,)) - - -class _ParameterizedTestIter(object): - """Callable and iterable class for producing new test cases.""" - - def __init__(self, test_method, testcases, naming_type): - """Returns concrete test functions for a test and a list of parameters. - - The naming_type is used to determine the name of the concrete - functions as reported by the unittest framework. If naming_type is - _FIRST_ARG, the testcases must be tuples, and the first element must - have a string representation that is a valid Python identifier. - - Args: - test_method: The decorated test method. - testcases: (list of tuple/dict) A list of parameter - tuples/dicts for individual test invocations. - naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. 
- """ - self._test_method = test_method - self.testcases = testcases - self._naming_type = naming_type - - def __call__(self, *args, **kwargs): - raise RuntimeError('You appear to be running a parameterized test case ' - 'without having inherited from parameterized.' - 'TestCase. This is bad because none of ' - 'your test cases are actually being run.') - - def __iter__(self): - test_method = self._test_method - naming_type = self._naming_type - - def MakeBoundParamTest(testcase_params): - @functools.wraps(test_method) - def BoundParamTest(self): - if isinstance(testcase_params, collections_abc.Mapping): - test_method(self, **testcase_params) - elif _NonStringIterable(testcase_params): - test_method(self, *testcase_params) - else: - test_method(self, testcase_params) - - if naming_type is _FIRST_ARG: - # Signal the metaclass that the name of the test function is unique - # and descriptive. - BoundParamTest.__x_use_name__ = True - BoundParamTest.__name__ += str(testcase_params[0]) - testcase_params = testcase_params[1:] - elif naming_type is _ARGUMENT_REPR: - # __x_extra_id__ is used to pass naming information to the __new__ - # method of TestGeneratorMetaclass. - # The metaclass will make sure to create a unique, but nondescriptive - # name for this test. - BoundParamTest.__x_extra_id__ = '(%s)' % ( - _FormatParameterList(testcase_params),) - else: - raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) - - BoundParamTest.__doc__ = '%s(%s)' % ( - BoundParamTest.__name__, _FormatParameterList(testcase_params)) - if test_method.__doc__: - BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) - return BoundParamTest - return (MakeBoundParamTest(c) for c in self.testcases) - - -def _IsSingletonList(testcases): - """True iff testcases contains only a single non-tuple element.""" - return len(testcases) == 1 and not isinstance(testcases[0], tuple) - - -def _ModifyClass(class_object, testcases, naming_type): - assert not getattr(class_object, '_id_suffix', None), ( - 'Cannot add parameters to %s,' - ' which already has parameterized methods.' % (class_object,)) - class_object._id_suffix = id_suffix = {} - # We change the size of __dict__ while we iterate over it, - # which Python 3.x will complain about, so use copy(). - for name, obj in class_object.__dict__.copy().items(): - if (name.startswith(unittest.TestLoader.testMethodPrefix) - and isinstance(obj, types.FunctionType)): - delattr(class_object, name) - methods = {} - _UpdateClassDictForParamTestCase( - methods, id_suffix, name, - _ParameterizedTestIter(obj, testcases, naming_type)) - for name, meth in methods.items(): - setattr(class_object, name, meth) - - -def _ParameterDecorator(naming_type, testcases): - """Implementation of the parameterization decorators. - - Args: - naming_type: The naming type. - testcases: Testcase parameters. - - Returns: - A function for modifying the decorated object. - """ - def _Apply(obj): - if isinstance(obj, type): - _ModifyClass( - obj, - list(testcases) if not isinstance(testcases, collections_abc.Sequence) - else testcases, - naming_type) - return obj - else: - return _ParameterizedTestIter(obj, testcases, naming_type) - - if _IsSingletonList(testcases): - assert _NonStringIterable(testcases[0]), ( - 'Single parameter argument must be a non-string iterable') - testcases = testcases[0] - - return _Apply - - -def parameters(*testcases): # pylint: disable=invalid-name - """A decorator for creating parameterized tests. - - See the module docstring for a usage example. 
- Args: - *testcases: Parameters for the decorated method, either a single - iterable, or a list of tuples/dicts/objects (for tests - with only one argument). - - Returns: - A test generator to be handled by TestGeneratorMetaclass. - """ - return _ParameterDecorator(_ARGUMENT_REPR, testcases) - - -def named_parameters(*testcases): # pylint: disable=invalid-name - """A decorator for creating parameterized tests. - - See the module docstring for a usage example. The first element of - each parameter tuple should be a string and will be appended to the - name of the test method. - - Args: - *testcases: Parameters for the decorated method, either a single - iterable, or a list of tuples. - - Returns: - A test generator to be handled by TestGeneratorMetaclass. - """ - return _ParameterDecorator(_FIRST_ARG, testcases) - - -class TestGeneratorMetaclass(type): - """Metaclass for test cases with test generators. - - A test generator is an iterable in a testcase that produces callables. These - callables must be single-argument methods. These methods are injected into - the class namespace and the original iterable is removed. If the name of the - iterable conforms to the test pattern, the injected methods will be picked - up as tests by the unittest framework. - - In general, it is supposed to be used in conjunction with the - parameters decorator. - """ - - def __new__(mcs, class_name, bases, dct): - dct['_id_suffix'] = id_suffix = {} - for name, obj in dct.copy().items(): - if (name.startswith(unittest.TestLoader.testMethodPrefix) and - _NonStringIterable(obj)): - iterator = iter(obj) - dct.pop(name) - _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) - - return type.__new__(mcs, class_name, bases, dct) - - -def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): - """Adds individual test cases to a dictionary. - - Args: - dct: The target dictionary. - id_suffix: The dictionary for mapping names to test IDs. - name: The original name of the test case. - iterator: The iterator generating the individual test cases. - """ - for idx, func in enumerate(iterator): - assert callable(func), 'Test generators must yield callables, got %r' % ( - func,) - if getattr(func, '__x_use_name__', False): - new_name = func.__name__ - else: - new_name = '%s%s%d' % (name, _SEPARATOR, idx) - assert new_name not in dct, ( - 'Name of parameterized test case "%s" not unique' % (new_name,)) - dct[new_name] = func - id_suffix[new_name] = getattr(func, '__x_extra_id__', '') - - -class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): - """Base class for test cases using the parameters decorator.""" - - def _OriginalName(self): - return self._testMethodName.split(_SEPARATOR)[0] - - def __str__(self): - return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) - - def id(self): # pylint: disable=invalid-name - """Returns the descriptive ID of the test. - - This is used internally by the unittesting framework to get a name - for the test to be used in reports. - - Returns: - The test id. - """ - return '%s.%s%s' % (_StrClass(self.__class__), - self._OriginalName(), - self._id_suffix.get(self._testMethodName, '')) - - -def CoopTestCase(other_base_class): - """Returns a new base class with a cooperative metaclass base. - - This enables the TestCase to be used in combination - with other base classes that have custom metaclasses, such as - mox.MoxTestBase. - - Only works with metaclasses that do not override type.__new__. 
- - Example: - - import google3 - import mox - - from google3.testing.pybase import parameterized - - class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): - ... - - Args: - other_base_class: (class) A test case base class. - - Returns: - A new class object. - """ - metaclass = type( - 'CoopMetaclass', - (other_base_class.__metaclass__, - TestGeneratorMetaclass), {}) - return metaclass( - 'CoopTestCase', - (other_base_class, TestCase), {}) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py deleted file mode 100644 index 7fef237670..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py +++ /dev/null @@ -1,112 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Determine which implementation of the protobuf API is used in this process. -""" - -import os -import sys -import warnings - -try: - # pylint: disable=g-import-not-at-top - from google.protobuf.internal import _api_implementation - # The compile-time constants in the _api_implementation module can be used to - # switch to a certain implementation of the Python API at build time. - _api_version = _api_implementation.api_version -except ImportError: - _api_version = -1 # Unspecified by compiler flags. - -if _api_version == 1: - raise ValueError('api_version=1 is no longer supported.') - - -_default_implementation_type = ('cpp' if _api_version > 0 else 'python') - - -# This environment variable can be used to switch to a certain implementation -# of the Python API, overriding the compile-time constants in the -# _api_implementation module. Right now only 'python' and 'cpp' are valid -# values. Any other value will be ignored. 
-_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', - _default_implementation_type) - -if _implementation_type != 'python': - _implementation_type = 'cpp' - -if 'PyPy' in sys.version and _implementation_type == 'cpp': - warnings.warn('PyPy does not work yet with cpp protocol buffers. ' - 'Falling back to the python implementation.') - _implementation_type = 'python' - - -# Detect if serialization should be deterministic by default -try: - # The presence of this module in a build allows the proto implementation to - # be upgraded merely via build deps. - # - # NOTE: Merely importing this automatically enables deterministic proto - # serialization for C++ code, but we still need to export it as a boolean so - # that we can do the same for `_implementation_type == 'python'`. - # - # NOTE2: It is possible for C++ code to enable deterministic serialization by - # default _without_ affecting Python code, if the C++ implementation is not in - # use by this module. That is intended behavior, so we don't actually expose - # this boolean outside of this module. - # - # pylint: disable=g-import-not-at-top,unused-import - from google.protobuf import enable_deterministic_proto_serialization - _python_deterministic_proto_serialization = True -except ImportError: - _python_deterministic_proto_serialization = False - - -# Usage of this function is discouraged. Clients shouldn't care which -# implementation of the API is in use. Note that there is no guarantee -# that differences between APIs will be maintained. -# Please don't use this function if possible. -def Type(): - return _implementation_type - - -def _SetType(implementation_type): - """Never use! Only for protobuf benchmark.""" - global _implementation_type - _implementation_type = implementation_type - - -# See comment on 'Type' above. -def Version(): - return 2 - - -# For internal use only -def IsPythonDefaultSerializationDeterministic(): - return _python_deterministic_proto_serialization diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py deleted file mode 100644 index 64353ee4af..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py +++ /dev/null @@ -1,130 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Builds descriptors, message classes and services for generated _pb2.py. - -This file is only called in python generated _pb2.py files. It builds -descriptors, message classes and services that users can directly use -in generated code. -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -_sym_db = _symbol_database.Default() - - -def BuildMessageAndEnumDescriptors(file_des, module): - """Builds message and enum descriptors. - - Args: - file_des: FileDescriptor of the .proto file - module: Generated _pb2 module - """ - - def BuildNestedDescriptors(msg_des, prefix): - for (name, nested_msg) in msg_des.nested_types_by_name.items(): - module_name = prefix + name.upper() - module[module_name] = nested_msg - BuildNestedDescriptors(nested_msg, module_name + '_') - for enum_des in msg_des.enum_types: - module[prefix + enum_des.name.upper()] = enum_des - - for (name, msg_des) in file_des.message_types_by_name.items(): - module_name = '_' + name.upper() - module[module_name] = msg_des - BuildNestedDescriptors(msg_des, module_name + '_') - - -def BuildTopDescriptorsAndMessages(file_des, module_name, module): - """Builds top level descriptors and message classes. - - Args: - file_des: FileDescriptor of the .proto file - module_name: str, the name of generated _pb2 module - module: Generated _pb2 module - """ - - def BuildMessage(msg_des): - create_dict = {} - for (name, nested_msg) in msg_des.nested_types_by_name.items(): - create_dict[name] = BuildMessage(nested_msg) - create_dict['DESCRIPTOR'] = msg_des - create_dict['__module__'] = module_name - message_class = _reflection.GeneratedProtocolMessageType( - msg_des.name, (_message.Message,), create_dict) - _sym_db.RegisterMessage(message_class) - return message_class - - # top level enums - for (name, enum_des) in file_des.enum_types_by_name.items(): - module['_' + name.upper()] = enum_des - module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) - for enum_value in enum_des.values: - module[enum_value.name] = enum_value.number - - # top level extensions - for (name, extension_des) in file_des.extensions_by_name.items(): - module[name.upper() + '_FIELD_NUMBER'] = extension_des.number - module[name] = extension_des - - # services - for (name, service) in file_des.services_by_name.items(): - module['_' + name.upper()] = service - - # Build messages. - for (name, msg_des) in file_des.message_types_by_name.items(): - module[name] = BuildMessage(msg_des) - - -def BuildServices(file_des, module_name, module): - """Builds services classes and services stub class. 
- - Args: - file_des: FileDescriptor of the .proto file - module_name: str, the name of generated _pb2 module - module: Generated _pb2 module - """ - # pylint: disable=g-import-not-at-top - from google.protobuf import service as _service - from google.protobuf import service_reflection - # pylint: enable=g-import-not-at-top - for (name, service) in file_des.services_by_name.items(): - module[name] = service_reflection.GeneratedServiceType( - name, (_service.Service,), - dict(DESCRIPTOR=service, __module__=module_name)) - stub_name = name + '_Stub' - module[stub_name] = service_reflection.GeneratedServiceStubType( - stub_name, (module[name],), - dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py deleted file mode 100644 index 29fbb53d2f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py +++ /dev/null @@ -1,710 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains container classes to represent different protocol buffer types. - -This file defines container classes which represent categories of protocol -buffer field types which need extra maintenance. Currently these categories -are: - -- Repeated scalar fields - These are all repeated fields which aren't - composite (e.g. they are of simple types like int32, string, etc). -- Repeated composite fields - Repeated fields which are composite. This - includes groups and nested messages. 
-""" - -import collections.abc -import copy -import pickle -from typing import ( - Any, - Iterable, - Iterator, - List, - MutableMapping, - MutableSequence, - NoReturn, - Optional, - Sequence, - TypeVar, - Union, - overload, -) - - -_T = TypeVar('_T') -_K = TypeVar('_K') -_V = TypeVar('_V') - - -class BaseContainer(Sequence[_T]): - """Base container class.""" - - # Minimizes memory usage and disallows assignment to other attributes. - __slots__ = ['_message_listener', '_values'] - - def __init__(self, message_listener: Any) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The RepeatedScalarFieldContainer will call this object's - Modified() method when it is modified. - """ - self._message_listener = message_listener - self._values = [] - - @overload - def __getitem__(self, key: int) -> _T: - ... - - @overload - def __getitem__(self, key: slice) -> List[_T]: - ... - - def __getitem__(self, key): - """Retrieves item by the specified key.""" - return self._values[key] - - def __len__(self) -> int: - """Returns the number of elements in the container.""" - return len(self._values) - - def __ne__(self, other: Any) -> bool: - """Checks if another instance isn't equal to this one.""" - # The concrete classes should define __eq__. - return not self == other - - __hash__ = None - - def __repr__(self) -> str: - return repr(self._values) - - def sort(self, *args, **kwargs) -> None: - # Continue to support the old sort_function keyword argument. - # This is expected to be a rare occurrence, so use LBYL to avoid - # the overhead of actually catching KeyError. - if 'sort_function' in kwargs: - kwargs['cmp'] = kwargs.pop('sort_function') - self._values.sort(*args, **kwargs) - - def reverse(self) -> None: - self._values.reverse() - - -# TODO(slebedev): Remove this. BaseContainer does *not* conform to -# MutableSequence, only its subclasses do. -collections.abc.MutableSequence.register(BaseContainer) - - -class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): - """Simple, type-checked, list-like container for holding repeated scalars.""" - - # Disallows assignment to other attributes. - __slots__ = ['_type_checker'] - - def __init__( - self, - message_listener: Any, - type_checker: Any, - ) -> None: - """Args: - - message_listener: A MessageListener implementation. The - RepeatedScalarFieldContainer will call this object's Modified() method - when it is modified. - type_checker: A type_checkers.ValueChecker instance to run on elements - inserted into this container. - """ - super().__init__(message_listener) - self._type_checker = type_checker - - def append(self, value: _T) -> None: - """Appends an item to the list. Similar to list.append().""" - self._values.append(self._type_checker.CheckValue(value)) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def insert(self, key: int, value: _T) -> None: - """Inserts the item at the specified position. Similar to list.insert().""" - self._values.insert(key, self._type_checker.CheckValue(value)) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def extend(self, elem_seq: Iterable[_T]) -> None: - """Extends by appending the given iterable. Similar to list.extend().""" - if elem_seq is None: - return - try: - elem_seq_iter = iter(elem_seq) - except TypeError: - if not elem_seq: - # silently ignore falsy inputs :-/. - # TODO(ptucker): Deprecate this behavior. 
b/18413862 - return - raise - - new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] - if new_values: - self._values.extend(new_values) - self._message_listener.Modified() - - def MergeFrom( - self, - other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], - ) -> None: - """Appends the contents of another repeated field of the same type to this - one. We do not check the types of the individual fields. - """ - self._values.extend(other) - self._message_listener.Modified() - - def remove(self, elem: _T): - """Removes an item from the list. Similar to list.remove().""" - self._values.remove(elem) - self._message_listener.Modified() - - def pop(self, key: Optional[int] = -1) -> _T: - """Removes and returns an item at a given index. Similar to list.pop().""" - value = self._values[key] - self.__delitem__(key) - return value - - @overload - def __setitem__(self, key: int, value: _T) -> None: - ... - - @overload - def __setitem__(self, key: slice, value: Iterable[_T]) -> None: - ... - - def __setitem__(self, key, value) -> None: - """Sets the item on the specified position.""" - if isinstance(key, slice): - if key.step is not None: - raise ValueError('Extended slices not supported') - self._values[key] = map(self._type_checker.CheckValue, value) - self._message_listener.Modified() - else: - self._values[key] = self._type_checker.CheckValue(value) - self._message_listener.Modified() - - def __delitem__(self, key: Union[int, slice]) -> None: - """Deletes the item at the specified position.""" - del self._values[key] - self._message_listener.Modified() - - def __eq__(self, other: Any) -> bool: - """Compares the current instance with another one.""" - if self is other: - return True - # Special case for the same type which should be common and fast. - if isinstance(other, self.__class__): - return other._values == self._values - # We are presumably comparing against some other sequence type. - return other == self._values - - def __deepcopy__( - self, - unused_memo: Any = None, - ) -> 'RepeatedScalarFieldContainer[_T]': - clone = RepeatedScalarFieldContainer( - copy.deepcopy(self._message_listener), self._type_checker) - clone.MergeFrom(self) - return clone - - def __reduce__(self, **kwargs) -> NoReturn: - raise pickle.PickleError( - "Can't pickle repeated scalar fields, convert to list first") - - -# TODO(slebedev): Constrain T to be a subtype of Message. -class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): - """Simple, list-like container for holding repeated composite fields.""" - - # Disallows assignment to other attributes. - __slots__ = ['_message_descriptor'] - - def __init__(self, message_listener: Any, message_descriptor: Any) -> None: - """ - Note that we pass in a descriptor instead of the generated directly, - since at the time we construct a _RepeatedCompositeFieldContainer we - haven't yet necessarily initialized the type that will be contained in the - container. - - Args: - message_listener: A MessageListener implementation. - The RepeatedCompositeFieldContainer will call this object's - Modified() method when it is modified. - message_descriptor: A Descriptor instance describing the protocol type - that should be present in this container. We'll use the - _concrete_class field of this descriptor when the client calls add(). - """ - super().__init__(message_listener) - self._message_descriptor = message_descriptor - - def add(self, **kwargs: Any) -> _T: - """Adds a new element at the end of the list and returns it. 
Keyword - arguments may be used to initialize the element. - """ - new_element = self._message_descriptor._concrete_class(**kwargs) - new_element._SetListener(self._message_listener) - self._values.append(new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - return new_element - - def append(self, value: _T) -> None: - """Appends one element by copying the message.""" - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - new_element.CopyFrom(value) - self._values.append(new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def insert(self, key: int, value: _T) -> None: - """Inserts the item at the specified position by copying.""" - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - new_element.CopyFrom(value) - self._values.insert(key, new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def extend(self, elem_seq: Iterable[_T]) -> None: - """Extends by appending the given sequence of elements of the same type - - as this one, copying each individual message. - """ - message_class = self._message_descriptor._concrete_class - listener = self._message_listener - values = self._values - for message in elem_seq: - new_element = message_class() - new_element._SetListener(listener) - new_element.MergeFrom(message) - values.append(new_element) - listener.Modified() - - def MergeFrom( - self, - other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], - ) -> None: - """Appends the contents of another repeated field of the same type to this - one, copying each individual message. - """ - self.extend(other) - - def remove(self, elem: _T) -> None: - """Removes an item from the list. Similar to list.remove().""" - self._values.remove(elem) - self._message_listener.Modified() - - def pop(self, key: Optional[int] = -1) -> _T: - """Removes and returns an item at a given index. Similar to list.pop().""" - value = self._values[key] - self.__delitem__(key) - return value - - @overload - def __setitem__(self, key: int, value: _T) -> None: - ... - - @overload - def __setitem__(self, key: slice, value: Iterable[_T]) -> None: - ... - - def __setitem__(self, key, value): - # This method is implemented to make RepeatedCompositeFieldContainer - # structurally compatible with typing.MutableSequence. It is - # otherwise unsupported and will always raise an error. - raise TypeError( - f'{self.__class__.__name__} object does not support item assignment') - - def __delitem__(self, key: Union[int, slice]) -> None: - """Deletes the item at the specified position.""" - del self._values[key] - self._message_listener.Modified() - - def __eq__(self, other: Any) -> bool: - """Compares the current instance with another one.""" - if self is other: - return True - if not isinstance(other, self.__class__): - raise TypeError('Can only compare repeated composite fields against ' - 'other repeated composite fields.') - return self._values == other._values - - -class ScalarMap(MutableMapping[_K, _V]): - """Simple, type-checked, dict-like container for holding repeated scalars.""" - - # Disallows assignment to other attributes. 
- __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', - '_entry_descriptor'] - - def __init__( - self, - message_listener: Any, - key_checker: Any, - value_checker: Any, - entry_descriptor: Any, - ) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The ScalarMap will call this object's Modified() method when it - is modified. - key_checker: A type_checkers.ValueChecker instance to run on keys - inserted into this container. - value_checker: A type_checkers.ValueChecker instance to run on values - inserted into this container. - entry_descriptor: The MessageDescriptor of a map entry: key and value. - """ - self._message_listener = message_listener - self._key_checker = key_checker - self._value_checker = value_checker - self._entry_descriptor = entry_descriptor - self._values = {} - - def __getitem__(self, key: _K) -> _V: - try: - return self._values[key] - except KeyError: - key = self._key_checker.CheckValue(key) - val = self._value_checker.DefaultValue() - self._values[key] = val - return val - - def __contains__(self, item: _K) -> bool: - # We check the key's type to match the strong-typing flavor of the API. - # Also this makes it easier to match the behavior of the C++ implementation. - self._key_checker.CheckValue(item) - return item in self._values - - @overload - def get(self, key: _K) -> Optional[_V]: - ... - - @overload - def get(self, key: _K, default: _T) -> Union[_V, _T]: - ... - - # We need to override this explicitly, because our defaultdict-like behavior - # will make the default implementation (from our base class) always insert - # the key. - def get(self, key, default=None): - if key in self: - return self[key] - else: - return default - - def __setitem__(self, key: _K, value: _V) -> _T: - checked_key = self._key_checker.CheckValue(key) - checked_value = self._value_checker.CheckValue(value) - self._values[checked_key] = checked_value - self._message_listener.Modified() - - def __delitem__(self, key: _K) -> None: - del self._values[key] - self._message_listener.Modified() - - def __len__(self) -> int: - return len(self._values) - - def __iter__(self) -> Iterator[_K]: - return iter(self._values) - - def __repr__(self) -> str: - return repr(self._values) - - def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: - self._values.update(other._values) - self._message_listener.Modified() - - def InvalidateIterators(self) -> None: - # It appears that the only way to reliably invalidate iterators to - # self._values is to ensure that its size changes. - original = self._values - self._values = original.copy() - original[None] = None - - # This is defined in the abstract base, but we can do it much more cheaply. - def clear(self) -> None: - self._values.clear() - self._message_listener.Modified() - - def GetEntryClass(self) -> Any: - return self._entry_descriptor._concrete_class - - -class MessageMap(MutableMapping[_K, _V]): - """Simple, type-checked, dict-like container for with submessage values.""" - - # Disallows assignment to other attributes. - __slots__ = ['_key_checker', '_values', '_message_listener', - '_message_descriptor', '_entry_descriptor'] - - def __init__( - self, - message_listener: Any, - message_descriptor: Any, - key_checker: Any, - entry_descriptor: Any, - ) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The ScalarMap will call this object's Modified() method when it - is modified. 
- key_checker: A type_checkers.ValueChecker instance to run on keys - inserted into this container. - value_checker: A type_checkers.ValueChecker instance to run on values - inserted into this container. - entry_descriptor: The MessageDescriptor of a map entry: key and value. - """ - self._message_listener = message_listener - self._message_descriptor = message_descriptor - self._key_checker = key_checker - self._entry_descriptor = entry_descriptor - self._values = {} - - def __getitem__(self, key: _K) -> _V: - key = self._key_checker.CheckValue(key) - try: - return self._values[key] - except KeyError: - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - self._values[key] = new_element - self._message_listener.Modified() - return new_element - - def get_or_create(self, key: _K) -> _V: - """get_or_create() is an alias for getitem (ie. map[key]). - - Args: - key: The key to get or create in the map. - - This is useful in cases where you want to be explicit that the call is - mutating the map. This can avoid lint errors for statements like this - that otherwise would appear to be pointless statements: - - msg.my_map[key] - """ - return self[key] - - @overload - def get(self, key: _K) -> Optional[_V]: - ... - - @overload - def get(self, key: _K, default: _T) -> Union[_V, _T]: - ... - - # We need to override this explicitly, because our defaultdict-like behavior - # will make the default implementation (from our base class) always insert - # the key. - def get(self, key, default=None): - if key in self: - return self[key] - else: - return default - - def __contains__(self, item: _K) -> bool: - item = self._key_checker.CheckValue(item) - return item in self._values - - def __setitem__(self, key: _K, value: _V) -> NoReturn: - raise ValueError('May not set values directly, call my_map[key].foo = 5') - - def __delitem__(self, key: _K) -> None: - key = self._key_checker.CheckValue(key) - del self._values[key] - self._message_listener.Modified() - - def __len__(self) -> int: - return len(self._values) - - def __iter__(self) -> Iterator[_K]: - return iter(self._values) - - def __repr__(self) -> str: - return repr(self._values) - - def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: - # pylint: disable=protected-access - for key in other._values: - # According to documentation: "When parsing from the wire or when merging, - # if there are duplicate map keys the last key seen is used". - if key in self: - del self[key] - self[key].CopyFrom(other[key]) - # self._message_listener.Modified() not required here, because - # mutations to submessages already propagate. - - def InvalidateIterators(self) -> None: - # It appears that the only way to reliably invalidate iterators to - # self._values is to ensure that its size changes. - original = self._values - self._values = original.copy() - original[None] = None - - # This is defined in the abstract base, but we can do it much more cheaply. - def clear(self) -> None: - self._values.clear() - self._message_listener.Modified() - - def GetEntryClass(self) -> Any: - return self._entry_descriptor._concrete_class - - -class _UnknownField: - """A parsed unknown field.""" - - # Disallows assignment to other attributes. 
- __slots__ = ['_field_number', '_wire_type', '_data'] - - def __init__(self, field_number, wire_type, data): - self._field_number = field_number - self._wire_type = wire_type - self._data = data - return - - def __lt__(self, other): - # pylint: disable=protected-access - return self._field_number < other._field_number - - def __eq__(self, other): - if self is other: - return True - # pylint: disable=protected-access - return (self._field_number == other._field_number and - self._wire_type == other._wire_type and - self._data == other._data) - - -class UnknownFieldRef: # pylint: disable=missing-class-docstring - - def __init__(self, parent, index): - self._parent = parent - self._index = index - - def _check_valid(self): - if not self._parent: - raise ValueError('UnknownField does not exist. ' - 'The parent message might be cleared.') - if self._index >= len(self._parent): - raise ValueError('UnknownField does not exist. ' - 'The parent message might be cleared.') - - @property - def field_number(self): - self._check_valid() - # pylint: disable=protected-access - return self._parent._internal_get(self._index)._field_number - - @property - def wire_type(self): - self._check_valid() - # pylint: disable=protected-access - return self._parent._internal_get(self._index)._wire_type - - @property - def data(self): - self._check_valid() - # pylint: disable=protected-access - return self._parent._internal_get(self._index)._data - - -class UnknownFieldSet: - """UnknownField container""" - - # Disallows assignment to other attributes. - __slots__ = ['_values'] - - def __init__(self): - self._values = [] - - def __getitem__(self, index): - if self._values is None: - raise ValueError('UnknownFields does not exist. ' - 'The parent message might be cleared.') - size = len(self._values) - if index < 0: - index += size - if index < 0 or index >= size: - raise IndexError('index %d out of range'.index) - - return UnknownFieldRef(self, index) - - def _internal_get(self, index): - return self._values[index] - - def __len__(self): - if self._values is None: - raise ValueError('UnknownFields does not exist. ' - 'The parent message might be cleared.') - return len(self._values) - - def _add(self, field_number, wire_type, data): - unknown_field = _UnknownField(field_number, wire_type, data) - self._values.append(unknown_field) - return unknown_field - - def __iter__(self): - for i in range(len(self)): - yield UnknownFieldRef(self, i) - - def _extend(self, other): - if other is None: - return - # pylint: disable=protected-access - self._values.extend(other._values) - - def __eq__(self, other): - if self is other: - return True - # Sort unknown fields because their order shouldn't - # affect equality test. 
- values = list(self._values) - if other is None: - return not values - values.sort() - # pylint: disable=protected-access - other_values = sorted(other._values) - return values == other_values - - def _clear(self): - for value in self._values: - # pylint: disable=protected-access - if isinstance(value._data, UnknownFieldSet): - value._data._clear() # pylint: disable=protected-access - self._values = None diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py deleted file mode 100644 index bc1b7b785c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py +++ /dev/null @@ -1,1029 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Code for decoding protocol buffer primitives. - -This code is very similar to encoder.py -- read the docs for that module first. - -A "decoder" is a function with the signature: - Decode(buffer, pos, end, message, field_dict) -The arguments are: - buffer: The string containing the encoded message. - pos: The current position in the string. - end: The position in the string where the current message ends. May be - less than len(buffer) if we're reading a sub-message. - message: The message object into which we're parsing. - field_dict: message._fields (avoids a hashtable lookup). -The decoder reads the field and stores it into field_dict, returning the new -buffer position. A decoder for a repeated field may proactively decode all of -the elements of that field, if they appear consecutively. - -Note that decoders may throw any of the following: - IndexError: Indicates a truncated message. - struct.error: Unpacking of a fixed-width field failed. - message.DecodeError: Other errors. - -Decoders are expected to raise an exception if they are called with pos > end. 
-This allows callers to be lax about bounds checking: it's fineto read past -"end" as long as you are sure that someone else will notice and throw an -exception later on. - -Something up the call stack is expected to catch IndexError and struct.error -and convert them to message.DecodeError. - -Decoders are constructed using decoder constructors with the signature: - MakeDecoder(field_number, is_repeated, is_packed, key, new_default) -The arguments are: - field_number: The field number of the field we want to decode. - is_repeated: Is the field a repeated field? (bool) - is_packed: Is the field a packed field? (bool) - key: The key to use when looking up the field within field_dict. - (This is actually the FieldDescriptor but nothing in this - file should depend on that.) - new_default: A function which takes a message object as a parameter and - returns a new instance of the default value for this field. - (This is called for repeated fields and sub-messages, when an - instance does not already exist.) - -As with encoders, we define a decoder constructor for every type of field. -Then, for every field of every message class we construct an actual decoder. -That decoder goes into a dict indexed by tag, so when we decode a message -we repeatedly read a tag, look up the corresponding decoder, and invoke it. -""" - -__author__ = 'kenton@google.com (Kenton Varda)' - -import math -import struct - -from google.protobuf.internal import containers -from google.protobuf.internal import encoder -from google.protobuf.internal import wire_format -from google.protobuf import message - - -# This is not for optimization, but rather to avoid conflicts with local -# variables named "message". -_DecodeError = message.DecodeError - - -def _VarintDecoder(mask, result_type): - """Return an encoder for a basic varint value (does not include tag). - - Decoded values will be bitwise-anded with the given mask before being - returned, e.g. to limit them to 32 bits. The returned decoder does not - take the usual "end" parameter -- the caller is expected to do bounds checking - after the fact (often the caller can defer such checking until later). The - decoder returns a (value, new_pos) pair. - """ - - def DecodeVarint(buffer, pos): - result = 0 - shift = 0 - while 1: - b = buffer[pos] - result |= ((b & 0x7f) << shift) - pos += 1 - if not (b & 0x80): - result &= mask - result = result_type(result) - return (result, pos) - shift += 7 - if shift >= 64: - raise _DecodeError('Too many bytes when decoding varint.') - return DecodeVarint - - -def _SignedVarintDecoder(bits, result_type): - """Like _VarintDecoder() but decodes signed values.""" - - signbit = 1 << (bits - 1) - mask = (1 << bits) - 1 - - def DecodeVarint(buffer, pos): - result = 0 - shift = 0 - while 1: - b = buffer[pos] - result |= ((b & 0x7f) << shift) - pos += 1 - if not (b & 0x80): - result &= mask - result = (result ^ signbit) - signbit - result = result_type(result) - return (result, pos) - shift += 7 - if shift >= 64: - raise _DecodeError('Too many bytes when decoding varint.') - return DecodeVarint - -# All 32-bit and 64-bit values are represented as int. -_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) -_DecodeSignedVarint = _SignedVarintDecoder(64, int) - -# Use these versions for values which must be limited to 32 bits. -_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) -_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) - - -def ReadTag(buffer, pos): - """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. 
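For orientation, the base-128 varint scheme that _VarintDecoder implements is small enough to sketch standalone; the helper below is only an illustration with its own name, not part of the vendored module being removed.

def read_varint(buf, pos):
    """Decode one unsigned base-128 varint at pos; return (value, new_pos)."""
    result = 0
    shift = 0
    while True:
        byte = buf[pos]
        result |= (byte & 0x7F) << shift
        pos += 1
        if not byte & 0x80:  # high bit clear: this was the final byte
            return result, pos
        shift += 7
        if shift >= 64:
            raise ValueError('Too many bytes when decoding varint.')

# 300 is encoded as 0xAC 0x02; a field tag is itself a varint of
# (field_number << 3) | wire_type, which ReadTag deliberately leaves undecoded.
assert read_varint(b'\xac\x02', 0) == (300, 2)
assert read_varint(bytes([(1 << 3) | 2]), 0) == ((1 << 3) | 2, 1)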
- - We return the raw bytes of the tag rather than decoding them. The raw - bytes can then be used to look up the proper decoder. This effectively allows - us to trade some work that would be done in pure-python (decoding a varint) - for work that is done in C (searching for a byte string in a hash table). - In a low-level language it would be much cheaper to decode the varint and - use that, but not in Python. - - Args: - buffer: memoryview object of the encoded bytes - pos: int of the current position to start from - - Returns: - Tuple[bytes, int] of the tag data and new position. - """ - start = pos - while buffer[pos] & 0x80: - pos += 1 - pos += 1 - - tag_bytes = buffer[start:pos].tobytes() - return tag_bytes, pos - - -# -------------------------------------------------------------------- - - -def _SimpleDecoder(wire_type, decode_value): - """Return a constructor for a decoder for fields of a particular type. - - Args: - wire_type: The field's wire type. - decode_value: A function which decodes an individual value, e.g. - _DecodeVarint() - """ - - def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, - clear_if_default=False): - if is_packed: - local_DecodeVarint = _DecodeVarint - def DecodePackedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - (endpoint, pos) = local_DecodeVarint(buffer, pos) - endpoint += pos - if endpoint > end: - raise _DecodeError('Truncated message.') - while pos < endpoint: - (element, pos) = decode_value(buffer, pos) - value.append(element) - if pos > endpoint: - del value[-1] # Discard corrupt value. - raise _DecodeError('Packed element was truncated.') - return pos - return DecodePackedField - elif is_repeated: - tag_bytes = encoder.TagBytes(field_number, wire_type) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (element, new_pos) = decode_value(buffer, pos) - value.append(element) - # Predict that the next tag is another copy of the same repeated - # field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos >= end: - # Prediction failed. Return. - if new_pos > end: - raise _DecodeError('Truncated message.') - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (new_value, pos) = decode_value(buffer, pos) - if pos > end: - raise _DecodeError('Truncated message.') - if clear_if_default and not new_value: - field_dict.pop(key, None) - else: - field_dict[key] = new_value - return pos - return DecodeField - - return SpecificDecoder - - -def _ModifiedDecoder(wire_type, decode_value, modify_value): - """Like SimpleDecoder but additionally invokes modify_value on every value - before storing it. Usually modify_value is ZigZagDecode. - """ - - # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but - # not enough to make a significant difference. - - def InnerDecode(buffer, pos): - (result, new_pos) = decode_value(buffer, pos) - return (modify_value(result), new_pos) - return _SimpleDecoder(wire_type, InnerDecode) - - -def _StructPackDecoder(wire_type, format): - """Return a constructor for a decoder for a fixed-width field. - - Args: - wire_type: The field's wire type. - format: The format string to pass to struct.unpack(). 
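The format strings handed to _StructPackDecoder are ordinary struct formats using the '<' prefix noted further down (for fixed32 this would presumably be '<I'), so decoding a fixed-width field body is a single little-endian unpack; a standalone sketch, outside the vendored module:

import struct

raw = b'\x2a\x00\x00\x00'            # fixed32 wire bytes for the value 42
value, = struct.unpack('<I', raw)    # '<' pins byte order and size across platforms
assert value == 42
assert struct.calcsize('<I') == 4    # the value_size that _StructPackDecoder precomputes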
- """ - - value_size = struct.calcsize(format) - local_unpack = struct.unpack - - # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but - # not enough to make a significant difference. - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - - def InnerDecode(buffer, pos): - new_pos = pos + value_size - result = local_unpack(format, buffer[pos:new_pos])[0] - return (result, new_pos) - return _SimpleDecoder(wire_type, InnerDecode) - - -def _FloatDecoder(): - """Returns a decoder for a float field. - - This code works around a bug in struct.unpack for non-finite 32-bit - floating-point values. - """ - - local_unpack = struct.unpack - - def InnerDecode(buffer, pos): - """Decode serialized float to a float and new position. - - Args: - buffer: memoryview of the serialized bytes - pos: int, position in the memory view to start at. - - Returns: - Tuple[float, int] of the deserialized float value and new position - in the serialized data. - """ - # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign - # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. - new_pos = pos + 4 - float_bytes = buffer[pos:new_pos].tobytes() - - # If this value has all its exponent bits set, then it's non-finite. - # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. - # To avoid that, we parse it specially. - if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): - # If at least one significand bit is set... - if float_bytes[0:3] != b'\x00\x00\x80': - return (math.nan, new_pos) - # If sign bit is set... - if float_bytes[3:4] == b'\xFF': - return (-math.inf, new_pos) - return (math.inf, new_pos) - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - result = local_unpack('= b'\xF0') - and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): - return (math.nan, new_pos) - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - result = local_unpack(' end: - raise _DecodeError('Truncated message.') - while pos < endpoint: - value_start_pos = pos - (element, pos) = _DecodeSignedVarint32(buffer, pos) - # pylint: disable=protected-access - if element in enum_type.values_by_number: - value.append(element) - else: - if not message._unknown_fields: - message._unknown_fields = [] - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_VARINT) - - message._unknown_fields.append( - (tag_bytes, buffer[value_start_pos:pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, element) - # pylint: enable=protected-access - if pos > endpoint: - if element in enum_type.values_by_number: - del value[-1] # Discard corrupt value. 
- else: - del message._unknown_fields[-1] - # pylint: disable=protected-access - del message._unknown_field_set._values[-1] - # pylint: enable=protected-access - raise _DecodeError('Packed element was truncated.') - return pos - return DecodePackedField - elif is_repeated: - tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - """Decode serialized repeated enum to its value and a new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. - """ - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (element, new_pos) = _DecodeSignedVarint32(buffer, pos) - # pylint: disable=protected-access - if element in enum_type.values_by_number: - value.append(element) - else: - if not message._unknown_fields: - message._unknown_fields = [] - message._unknown_fields.append( - (tag_bytes, buffer[pos:new_pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, element) - # pylint: enable=protected-access - # Predict that the next tag is another copy of the same repeated - # field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos >= end: - # Prediction failed. Return. - if new_pos > end: - raise _DecodeError('Truncated message.') - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - """Decode serialized repeated enum to its value and a new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. 
- """ - value_start_pos = pos - (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) - if pos > end: - raise _DecodeError('Truncated message.') - if clear_if_default and not enum_value: - field_dict.pop(key, None) - return pos - # pylint: disable=protected-access - if enum_value in enum_type.values_by_number: - field_dict[key] = enum_value - else: - if not message._unknown_fields: - message._unknown_fields = [] - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_VARINT) - message._unknown_fields.append( - (tag_bytes, buffer[value_start_pos:pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, enum_value) - # pylint: enable=protected-access - return pos - return DecodeField - - -# -------------------------------------------------------------------- - - -Int32Decoder = _SimpleDecoder( - wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) - -Int64Decoder = _SimpleDecoder( - wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) - -UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) -UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) - -SInt32Decoder = _ModifiedDecoder( - wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) -SInt64Decoder = _ModifiedDecoder( - wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) - -# Note that Python conveniently guarantees that when using the '<' prefix on -# formats, they will also have the same size across all platforms (as opposed -# to without the prefix, where their sizes depend on the C compiler's basic -# type sizes). -Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: - raise _DecodeError('Truncated string.') - value.append(_ConvertToUnicode(buffer[pos:new_pos])) - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - if clear_if_default and not size: - field_dict.pop(key, None) - else: - field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) - return new_pos - return DecodeField - - -def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, - clear_if_default=False): - """Returns a decoder for a bytes field.""" - - local_DecodeVarint = _DecodeVarint - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_LENGTH_DELIMITED) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - value.append(buffer[pos:new_pos].tobytes()) - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. 
- return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - if clear_if_default and not size: - field_dict.pop(key, None) - else: - field_dict[key] = buffer[pos:new_pos].tobytes() - return new_pos - return DecodeField - - -def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): - """Returns a decoder for a group field.""" - - end_tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_END_GROUP) - end_tag_len = len(end_tag_bytes) - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_START_GROUP) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read sub-message. - pos = value.add()._InternalParse(buffer, pos, end) - # Read end tag. - new_pos = pos+end_tag_len - if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: - raise _DecodeError('Missing group end tag.') - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read sub-message. - pos = value._InternalParse(buffer, pos, end) - # Read end tag. - new_pos = pos+end_tag_len - if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: - raise _DecodeError('Missing group end tag.') - return new_pos - return DecodeField - - -def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): - """Returns a decoder for a message field.""" - - local_DecodeVarint = _DecodeVarint - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_LENGTH_DELIMITED) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - # Read length. - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated message.') - # Read sub-message. - if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise _DecodeError('Unexpected end-group tag.') - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read length. - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated message.') - # Read sub-message. 
- if value._InternalParse(buffer, pos, new_pos) != new_pos: - # The only reason _InternalParse would return early is if it encountered - # an end-group tag. - raise _DecodeError('Unexpected end-group tag.') - return new_pos - return DecodeField - - -# -------------------------------------------------------------------- - -MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) - -def MessageSetItemDecoder(descriptor): - """Returns a decoder for a MessageSet item. - - The parameter is the message Descriptor. - - The message set message looks like this: - message MessageSet { - repeated group Item = 1 { - required int32 type_id = 2; - required string message = 3; - } - } - """ - - type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) - message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) - item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) - - local_ReadTag = ReadTag - local_DecodeVarint = _DecodeVarint - local_SkipField = SkipField - - def DecodeItem(buffer, pos, end, message, field_dict): - """Decode serialized message set to its value and new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. - """ - message_set_item_start = pos - type_id = -1 - message_start = -1 - message_end = -1 - - # Technically, type_id and message can appear in any order, so we need - # a little loop here. - while 1: - (tag_bytes, pos) = local_ReadTag(buffer, pos) - if tag_bytes == type_id_tag_bytes: - (type_id, pos) = local_DecodeVarint(buffer, pos) - elif tag_bytes == message_tag_bytes: - (size, message_start) = local_DecodeVarint(buffer, pos) - pos = message_end = message_start + size - elif tag_bytes == item_end_tag_bytes: - break - else: - pos = SkipField(buffer, pos, end, tag_bytes) - if pos == -1: - raise _DecodeError('Missing group end tag.') - - if pos > end: - raise _DecodeError('Truncated message.') - - if type_id == -1: - raise _DecodeError('MessageSet item missing type_id.') - if message_start == -1: - raise _DecodeError('MessageSet item missing message.') - - extension = message.Extensions._FindExtensionByNumber(type_id) - # pylint: disable=protected-access - if extension is not None: - value = field_dict.get(extension) - if value is None: - message_type = extension.message_type - if not hasattr(message_type, '_concrete_class'): - # pylint: disable=protected-access - message._FACTORY.GetPrototype(message_type) - value = field_dict.setdefault( - extension, message_type._concrete_class()) - if value._InternalParse(buffer, message_start,message_end) != message_end: - # The only reason _InternalParse would return early is if it encountered - # an end-group tag. 
- raise _DecodeError('Unexpected end-group tag.') - else: - if not message._unknown_fields: - message._unknown_fields = [] - message._unknown_fields.append( - (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - type_id, - wire_format.WIRETYPE_LENGTH_DELIMITED, - buffer[message_start:message_end].tobytes()) - # pylint: enable=protected-access - - return pos - - return DecodeItem - -# -------------------------------------------------------------------- - -def MapDecoder(field_descriptor, new_default, is_message_map): - """Returns a decoder for a map field.""" - - key = field_descriptor - tag_bytes = encoder.TagBytes(field_descriptor.number, - wire_format.WIRETYPE_LENGTH_DELIMITED) - tag_len = len(tag_bytes) - local_DecodeVarint = _DecodeVarint - # Can't read _concrete_class yet; might not be initialized. - message_type = field_descriptor.message_type - - def DecodeMap(buffer, pos, end, message, field_dict): - submsg = message_type._concrete_class() - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - # Read length. - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated message.') - # Read sub-message. - submsg.Clear() - if submsg._InternalParse(buffer, pos, new_pos) != new_pos: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise _DecodeError('Unexpected end-group tag.') - - if is_message_map: - value[submsg.key].CopyFrom(submsg.value) - else: - value[submsg.key] = submsg.value - - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - - return DecodeMap - -# -------------------------------------------------------------------- -# Optimization is not as heavy here because calls to SkipField() are rare, -# except for handling end-group tags. - -def _SkipVarint(buffer, pos, end): - """Skip a varint value. Returns the new position.""" - # Previously ord(buffer[pos]) raised IndexError when pos is out of range. - # With this code, ord(b'') raises TypeError. Both are handled in - # python_message.py to generate a 'Truncated message' error. - while ord(buffer[pos:pos+1].tobytes()) & 0x80: - pos += 1 - pos += 1 - if pos > end: - raise _DecodeError('Truncated message.') - return pos - -def _SkipFixed64(buffer, pos, end): - """Skip a fixed64 value. Returns the new position.""" - - pos += 8 - if pos > end: - raise _DecodeError('Truncated message.') - return pos - - -def _DecodeFixed64(buffer, pos): - """Decode a fixed64.""" - new_pos = pos + 8 - return (struct.unpack(' end: - raise _DecodeError('Truncated message.') - return pos - - -def _SkipGroup(buffer, pos, end): - """Skip sub-group. Returns the new position.""" - - while 1: - (tag_bytes, pos) = ReadTag(buffer, pos) - new_pos = SkipField(buffer, pos, end, tag_bytes) - if new_pos == -1: - return pos - pos = new_pos - - -def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): - """Decode UnknownFieldSet. 
Returns the UnknownFieldSet and new position.""" - - unknown_field_set = containers.UnknownFieldSet() - while end_pos is None or pos < end_pos: - (tag_bytes, pos) = ReadTag(buffer, pos) - (tag, _) = _DecodeVarint(tag_bytes, 0) - field_number, wire_type = wire_format.UnpackTag(tag) - if wire_type == wire_format.WIRETYPE_END_GROUP: - break - (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) - # pylint: disable=protected-access - unknown_field_set._add(field_number, wire_type, data) - - return (unknown_field_set, pos) - - -def _DecodeUnknownField(buffer, pos, wire_type): - """Decode a unknown field. Returns the UnknownField and new position.""" - - if wire_type == wire_format.WIRETYPE_VARINT: - (data, pos) = _DecodeVarint(buffer, pos) - elif wire_type == wire_format.WIRETYPE_FIXED64: - (data, pos) = _DecodeFixed64(buffer, pos) - elif wire_type == wire_format.WIRETYPE_FIXED32: - (data, pos) = _DecodeFixed32(buffer, pos) - elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: - (size, pos) = _DecodeVarint(buffer, pos) - data = buffer[pos:pos+size].tobytes() - pos += size - elif wire_type == wire_format.WIRETYPE_START_GROUP: - (data, pos) = _DecodeUnknownFieldSet(buffer, pos) - elif wire_type == wire_format.WIRETYPE_END_GROUP: - return (0, -1) - else: - raise _DecodeError('Wrong wire type in tag.') - - return (data, pos) - - -def _EndGroup(buffer, pos, end): - """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" - - return -1 - - -def _SkipFixed32(buffer, pos, end): - """Skip a fixed32 value. Returns the new position.""" - - pos += 4 - if pos > end: - raise _DecodeError('Truncated message.') - return pos - - -def _DecodeFixed32(buffer, pos): - """Decode a fixed32.""" - - new_pos = pos + 4 - return (struct.unpack('B').pack - - def EncodeVarint(write, value, unused_deterministic=None): - bits = value & 0x7f - value >>= 7 - while value: - write(local_int2byte(0x80|bits)) - bits = value & 0x7f - value >>= 7 - return write(local_int2byte(bits)) - - return EncodeVarint - - -def _SignedVarintEncoder(): - """Return an encoder for a basic signed varint value (does not include - tag).""" - - local_int2byte = struct.Struct('>B').pack - - def EncodeSignedVarint(write, value, unused_deterministic=None): - if value < 0: - value += (1 << 64) - bits = value & 0x7f - value >>= 7 - while value: - write(local_int2byte(0x80|bits)) - bits = value & 0x7f - value >>= 7 - return write(local_int2byte(bits)) - - return EncodeSignedVarint - - -_EncodeVarint = _VarintEncoder() -_EncodeSignedVarint = _SignedVarintEncoder() - - -def _VarintBytes(value): - """Encode the given integer as a varint and return the bytes. This is only - called at startup time so it doesn't need to be fast.""" - - pieces = [] - _EncodeVarint(pieces.append, value, True) - return b"".join(pieces) - - -def TagBytes(field_number, wire_type): - """Encode the given tag and return the bytes. Only called at startup.""" - - return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) - -# -------------------------------------------------------------------- -# As with sizers (see above), we have a number of common encoder -# implementations. - - -def _SimpleEncoder(wire_type, encode_value, compute_value_size): - """Return a constructor for an encoder for fields of a particular type. - - Args: - wire_type: The field's wire type, for encoding tags. - encode_value: A function which encodes an individual value, e.g. - _EncodeVarint(). 
- compute_value_size: A function which computes the size of an individual - value, e.g. _VarintSize(). - """ - - def SpecificEncoder(field_number, is_repeated, is_packed): - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - size = 0 - for element in value: - size += compute_value_size(element) - local_EncodeVarint(write, size, deterministic) - for element in value: - encode_value(write, element, deterministic) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, deterministic): - for element in value: - write(tag_bytes) - encode_value(write, element, deterministic) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, deterministic): - write(tag_bytes) - return encode_value(write, value, deterministic) - return EncodeField - - return SpecificEncoder - - -def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): - """Like SimpleEncoder but additionally invokes modify_value on every value - before passing it to encode_value. Usually modify_value is ZigZagEncode.""" - - def SpecificEncoder(field_number, is_repeated, is_packed): - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - size = 0 - for element in value: - size += compute_value_size(modify_value(element)) - local_EncodeVarint(write, size, deterministic) - for element in value: - encode_value(write, modify_value(element), deterministic) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, deterministic): - for element in value: - write(tag_bytes) - encode_value(write, modify_value(element), deterministic) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, deterministic): - write(tag_bytes) - return encode_value(write, modify_value(value), deterministic) - return EncodeField - - return SpecificEncoder - - -def _StructPackEncoder(wire_type, format): - """Return a constructor for an encoder for a fixed-width field. - - Args: - wire_type: The field's wire type, for encoding tags. - format: The format string to pass to struct.pack(). 
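The modify_value hook that _ModifiedEncoder applies is, for the sint fields, the ZigZag mapping provided by wire_format; a minimal standalone sketch of that mapping and its round trip (helper names here are illustrative only):

def zigzag_encode(n):
    # Interleave signs so small-magnitude values stay small on the wire:
    # 0, -1, 1, -2, 2, ... -> 0, 1, 2, 3, 4, ...
    return (n << 1) if n >= 0 else ~(n << 1)

def zigzag_decode(z):
    return (z >> 1) ^ -(z & 1)

assert [zigzag_encode(n) for n in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
assert all(zigzag_decode(zigzag_encode(n)) == n for n in range(-300, 300))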
- """ - - value_size = struct.calcsize(format) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - write(local_struct_pack(format, element)) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, unused_deterministic=None): - for element in value: - write(tag_bytes) - write(local_struct_pack(format, element)) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, unused_deterministic=None): - write(tag_bytes) - return write(local_struct_pack(format, value)) - return EncodeField - - return SpecificEncoder - - -def _FloatingPointEncoder(wire_type, format): - """Return a constructor for an encoder for float fields. - - This is like StructPackEncoder, but catches errors that may be due to - passing non-finite floating-point values to struct.pack, and makes a - second attempt to encode those values. - - Args: - wire_type: The field's wire type, for encoding tags. - format: The format string to pass to struct.pack(). - """ - - value_size = struct.calcsize(format) - if value_size == 4: - def EncodeNonFiniteOrRaise(write, value): - # Remember that the serialized form uses little-endian byte order. - if value == _POS_INF: - write(b'\x00\x00\x80\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x80\xFF') - elif value != value: # NaN - write(b'\x00\x00\xC0\x7F') - else: - raise - elif value_size == 8: - def EncodeNonFiniteOrRaise(write, value): - if value == _POS_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') - elif value != value: # NaN - write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') - else: - raise - else: - raise ValueError('Can\'t encode floating-point values that are ' - '%d bytes long (only 4 or 8)' % value_size) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - # This try/except block is going to be faster than any code that - # we could write to check whether element is finite. 
- try: - write(local_struct_pack(format, element)) - except SystemError: - EncodeNonFiniteOrRaise(write, element) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, unused_deterministic=None): - for element in value: - write(tag_bytes) - try: - write(local_struct_pack(format, element)) - except SystemError: - EncodeNonFiniteOrRaise(write, element) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, unused_deterministic=None): - write(tag_bytes) - try: - write(local_struct_pack(format, value)) - except SystemError: - EncodeNonFiniteOrRaise(write, value) - return EncodeField - - return SpecificEncoder - - -# ==================================================================== -# Here we declare an encoder constructor for each field type. These work -# very similarly to sizer constructors, described earlier. - - -Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( - wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) - -UInt32Encoder = UInt64Encoder = _SimpleEncoder( - wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) - -SInt32Encoder = SInt64Encoder = _ModifiedEncoder( - wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, - wire_format.ZigZagEncode) - -# Note that Python conveniently guarantees that when using the '<' prefix on -# formats, they will also have the same size across all platforms (as opposed -# to without the prefix, where their sizes depend on the C compiler's basic -# type sizes). -Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str - ValueType = int - - def __init__(self, enum_type): - """Inits EnumTypeWrapper with an EnumDescriptor.""" - self._enum_type = enum_type - self.DESCRIPTOR = enum_type # pylint: disable=invalid-name - - def Name(self, number): # pylint: disable=invalid-name - """Returns a string containing the name of an enum value.""" - try: - return self._enum_type.values_by_number[number].name - except KeyError: - pass # fall out to break exception chaining - - if not isinstance(number, int): - raise TypeError( - 'Enum value for {} must be an int, but got {} {!r}.'.format( - self._enum_type.name, type(number), number)) - else: - # repr here to handle the odd case when you pass in a boolean. - raise ValueError('Enum {} has no name defined for value {!r}'.format( - self._enum_type.name, number)) - - def Value(self, name): # pylint: disable=invalid-name - """Returns the value corresponding to the given enum name.""" - try: - return self._enum_type.values_by_name[name].number - except KeyError: - pass # fall out to break exception chaining - raise ValueError('Enum {} has no value defined for name {!r}'.format( - self._enum_type.name, name)) - - def keys(self): - """Return a list of the string names in the enum. - - Returns: - A list of strs, in the order they were defined in the .proto file. - """ - - return [value_descriptor.name - for value_descriptor in self._enum_type.values] - - def values(self): - """Return a list of the integer values in the enum. - - Returns: - A list of ints, in the order they were defined in the .proto file. - """ - - return [value_descriptor.number - for value_descriptor in self._enum_type.values] - - def items(self): - """Return a list of the (name, value) pairs of the enum. - - Returns: - A list of (str, int) pairs, in the order they were defined - in the .proto file. 
- """ - return [(value_descriptor.name, value_descriptor.number) - for value_descriptor in self._enum_type.values] - - def __getattr__(self, name): - """Returns the value corresponding to the given enum name.""" - try: - return super( - EnumTypeWrapper, - self).__getattribute__('_enum_type').values_by_name[name].number - except KeyError: - pass # fall out to break exception chaining - raise AttributeError('Enum {} has no value defined for name {!r}'.format( - self._enum_type.name, name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py deleted file mode 100644 index b346cf283e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py +++ /dev/null @@ -1,213 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains _ExtensionDict class to represent extensions. -""" - -from google.protobuf.internal import type_checkers -from google.protobuf.descriptor import FieldDescriptor - - -def _VerifyExtensionHandle(message, extension_handle): - """Verify that the given extension handle is valid.""" - - if not isinstance(extension_handle, FieldDescriptor): - raise KeyError('HasExtension() expects an extension handle, got: %s' % - extension_handle) - - if not extension_handle.is_extension: - raise KeyError('"%s" is not an extension.' % extension_handle.full_name) - - if not extension_handle.containing_type: - raise KeyError('"%s" is missing a containing_type.' - % extension_handle.full_name) - - if extension_handle.containing_type is not message.DESCRIPTOR: - raise KeyError('Extension "%s" extends message type "%s", but this ' - 'message is of type "%s".' % - (extension_handle.full_name, - extension_handle.containing_type.full_name, - message.DESCRIPTOR.full_name)) - - -# TODO(robinson): Unify error handling of "unknown extension" crap. 
-# TODO(robinson): Support iteritems()-style iteration over all -# extensions with the "has" bits turned on? -class _ExtensionDict(object): - - """Dict-like container for Extension fields on proto instances. - - Note that in all cases we expect extension handles to be - FieldDescriptors. - """ - - def __init__(self, extended_message): - """ - Args: - extended_message: Message instance for which we are the Extensions dict. - """ - self._extended_message = extended_message - - def __getitem__(self, extension_handle): - """Returns the current value of the given extension handle.""" - - _VerifyExtensionHandle(self._extended_message, extension_handle) - - result = self._extended_message._fields.get(extension_handle) - if result is not None: - return result - - if extension_handle.label == FieldDescriptor.LABEL_REPEATED: - result = extension_handle._default_constructor(self._extended_message) - elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - message_type = extension_handle.message_type - if not hasattr(message_type, '_concrete_class'): - # pylint: disable=protected-access - self._extended_message._FACTORY.GetPrototype(message_type) - assert getattr(extension_handle.message_type, '_concrete_class', None), ( - 'Uninitialized concrete class found for field %r (message type %r)' - % (extension_handle.full_name, - extension_handle.message_type.full_name)) - result = extension_handle.message_type._concrete_class() - try: - result._SetListener(self._extended_message._listener_for_children) - except ReferenceError: - pass - else: - # Singular scalar -- just return the default without inserting into the - # dict. - return extension_handle.default_value - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - result = self._extended_message._fields.setdefault( - extension_handle, result) - - return result - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - - my_fields = self._extended_message.ListFields() - other_fields = other._extended_message.ListFields() - - # Get rid of non-extension fields. - my_fields = [field for field in my_fields if field.is_extension] - other_fields = [field for field in other_fields if field.is_extension] - - return my_fields == other_fields - - def __ne__(self, other): - return not self == other - - def __len__(self): - fields = self._extended_message.ListFields() - # Get rid of non-extension fields. - extension_fields = [field for field in fields if field[0].is_extension] - return len(extension_fields) - - def __hash__(self): - raise TypeError('unhashable object') - - # Note that this is only meaningful for non-repeated, scalar extension - # fields. Note also that we may have to call _Modified() when we do - # successfully set a field this way, to set any necessary "has" bits in the - # ancestors of the extended message. - def __setitem__(self, extension_handle, value): - """If extension_handle specifies a non-repeated, scalar extension - field, sets the value of that field. 
- """ - - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or - extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): - raise TypeError( - 'Cannot assign to extension "%s" because it is a repeated or ' - 'composite type.' % extension_handle.full_name) - - # It's slightly wasteful to lookup the type checker each time, - # but we expect this to be a vanishingly uncommon case anyway. - type_checker = type_checkers.GetTypeChecker(extension_handle) - # pylint: disable=protected-access - self._extended_message._fields[extension_handle] = ( - type_checker.CheckValue(value)) - self._extended_message._Modified() - - def __delitem__(self, extension_handle): - self._extended_message.ClearExtension(extension_handle) - - def _FindExtensionByName(self, name): - """Tries to find a known extension with the specified name. - - Args: - name: Extension full name. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_name.get(name, None) - - def _FindExtensionByNumber(self, number): - """Tries to find a known extension with the field number. - - Args: - number: Extension field number. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_number.get(number, None) - - def __iter__(self): - # Return a generator over the populated extension fields - return (f[0] for f in self._extended_message.ListFields() - if f[0].is_extension) - - def __contains__(self, extension_handle): - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if extension_handle not in self._extended_message._fields: - return False - - if extension_handle.label == FieldDescriptor.LABEL_REPEATED: - return bool(self._extended_message._fields.get(extension_handle)) - - if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - value = self._extended_message._fields.get(extension_handle) - # pylint: disable=protected-access - return value is not None and value._is_present_in_parent - - return True diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py deleted file mode 100644 index 0fc255a774..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py +++ /dev/null @@ -1,78 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Defines a listener interface for observing certain -state transitions on Message objects. - -Also defines a null implementation of this interface. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -class MessageListener(object): - - """Listens for modifications made to a message. Meant to be registered via - Message._SetListener(). - - Attributes: - dirty: If True, then calling Modified() would be a no-op. This can be - used to avoid these calls entirely in the common case. - """ - - def Modified(self): - """Called every time the message is modified in such a way that the parent - message may need to be updated. This currently means either: - (a) The message was modified for the first time, so the parent message - should henceforth mark the message as present. - (b) The message's cached byte size became dirty -- i.e. the message was - modified for the first time after a previous call to ByteSize(). - Therefore the parent should also mark its byte size as dirty. - Note that (a) implies (b), since new objects start out with a client cached - size (zero). However, we document (a) explicitly because it is important. - - Modified() will *only* be called in response to one of these two events -- - not every time the sub-message is modified. - - Note that if the listener's |dirty| attribute is true, then calling - Modified at the moment would be a no-op, so it can be skipped. Performance- - sensitive callers should check this attribute directly before calling since - it will be true most of the time. - """ - - raise NotImplementedError - - -class NullMessageListener(object): - - """No-op MessageListener implementation.""" - - def Modified(self): - pass diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py deleted file mode 100644 index 63651a3f19..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/internal/message_set_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageSet.RegisterExtension(message_set_extension3) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) - - DESCRIPTOR._options = None - _TESTMESSAGESET._options = None - _TESTMESSAGESET._serialized_options = b'\010\001' - _TESTMESSAGESET._serialized_start=83 - _TESTMESSAGESET._serialized_end=113 - _TESTMESSAGESETEXTENSION1._serialized_start=116 - _TESTMESSAGESETEXTENSION1._serialized_end=281 - _TESTMESSAGESETEXTENSION2._serialized_start=284 - _TESTMESSAGESETEXTENSION2._serialized_end=451 - _TESTMESSAGESETEXTENSION3._serialized_start=453 - _TESTMESSAGESETEXTENSION3._serialized_end=493 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py deleted file mode 100644 index 5497083197..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/missing_enum_values.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTENUMVALUES._serialized_start=88 - _TESTENUMVALUES._serialized_end=409 - _TESTENUMVALUES_NESTEDENUM._serialized_start=378 - _TESTENUMVALUES_NESTEDENUM._serialized_end=409 - _TESTMISSINGENUMVALUES._serialized_start=412 - _TESTMISSINGENUMVALUES._serialized_end=751 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 - _JUSTSTRING._serialized_start=753 - _JUSTSTRING._serialized_end=780 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py deleted file mode 100644 index 0953706bac..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_extensions_dynamic.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) - - DESCRIPTOR._options = None - _DYNAMICMESSAGETYPE._serialized_start=132 - _DYNAMICMESSAGETYPE._serialized_end=163 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py deleted file mode 100644 index 1cfa1b7c8b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - ExtendedMessage.RegisterExtension(optional_int_extension) - ExtendedMessage.RegisterExtension(optional_message_extension) - ExtendedMessage.RegisterExtension(repeated_int_extension) - ExtendedMessage.RegisterExtension(repeated_message_extension) - - DESCRIPTOR._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None - _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' - _NESTEDMESSAGE.fields_by_name['submessage']._options = None - _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE._serialized_start=77 - _TOPLEVELMESSAGE._serialized_end=230 - _NESTEDMESSAGE._serialized_start=232 - _NESTEDMESSAGE._serialized_end=314 - _EXTENDEDMESSAGE._serialized_start=316 - _EXTENDEDMESSAGE._serialized_end=391 - _FOREIGNMESSAGE._serialized_start=393 - _FOREIGNMESSAGE._serialized_end=438 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py deleted file mode 100644 index d7f7115609..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_messages.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - OutOfOrderFields.RegisterExtension(optional_uint64) - OutOfOrderFields.RegisterExtension(optional_int64) - globals()['class'].RegisterExtension(globals()['continue']) - getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) - globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) - - DESCRIPTOR._options = None - _IS._serialized_start=2669 - _IS._serialized_end=2696 - _OUTOFORDERFIELDS._serialized_start=74 - _OUTOFORDERFIELDS._serialized_end=178 - _CLASS._serialized_start=181 - _CLASS._serialized_end=514 - _CLASS_TRY._serialized_start=448 - _CLASS_TRY._serialized_end=476 - _CLASS_FOR._serialized_start=478 - _CLASS_FOR._serialized_end=506 - _EXTENDCLASS._serialized_start=516 - _EXTENDCLASS._serialized_end=579 - _TESTFULLKEYWORD._serialized_start=581 - _TESTFULLKEYWORD._serialized_end=707 - _LOTSNESTEDMESSAGE._serialized_start=710 - _LOTSNESTEDMESSAGE._serialized_end=2667 - _LOTSNESTEDMESSAGE_B0._serialized_start=731 - _LOTSNESTEDMESSAGE_B0._serialized_end=735 - _LOTSNESTEDMESSAGE_B1._serialized_start=737 - 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 - _LOTSNESTEDMESSAGE_B2._serialized_start=743 - _LOTSNESTEDMESSAGE_B2._serialized_end=747 - _LOTSNESTEDMESSAGE_B3._serialized_start=749 - _LOTSNESTEDMESSAGE_B3._serialized_end=753 - _LOTSNESTEDMESSAGE_B4._serialized_start=755 - _LOTSNESTEDMESSAGE_B4._serialized_end=759 - _LOTSNESTEDMESSAGE_B5._serialized_start=761 - _LOTSNESTEDMESSAGE_B5._serialized_end=765 - _LOTSNESTEDMESSAGE_B6._serialized_start=767 - _LOTSNESTEDMESSAGE_B6._serialized_end=771 - _LOTSNESTEDMESSAGE_B7._serialized_start=773 - _LOTSNESTEDMESSAGE_B7._serialized_end=777 - _LOTSNESTEDMESSAGE_B8._serialized_start=779 - _LOTSNESTEDMESSAGE_B8._serialized_end=783 - _LOTSNESTEDMESSAGE_B9._serialized_start=785 - _LOTSNESTEDMESSAGE_B9._serialized_end=789 - _LOTSNESTEDMESSAGE_B10._serialized_start=791 - _LOTSNESTEDMESSAGE_B10._serialized_end=796 - _LOTSNESTEDMESSAGE_B11._serialized_start=798 - _LOTSNESTEDMESSAGE_B11._serialized_end=803 - _LOTSNESTEDMESSAGE_B12._serialized_start=805 - _LOTSNESTEDMESSAGE_B12._serialized_end=810 - _LOTSNESTEDMESSAGE_B13._serialized_start=812 - _LOTSNESTEDMESSAGE_B13._serialized_end=817 - _LOTSNESTEDMESSAGE_B14._serialized_start=819 - _LOTSNESTEDMESSAGE_B14._serialized_end=824 - _LOTSNESTEDMESSAGE_B15._serialized_start=826 - _LOTSNESTEDMESSAGE_B15._serialized_end=831 - _LOTSNESTEDMESSAGE_B16._serialized_start=833 - _LOTSNESTEDMESSAGE_B16._serialized_end=838 - _LOTSNESTEDMESSAGE_B17._serialized_start=840 - _LOTSNESTEDMESSAGE_B17._serialized_end=845 - _LOTSNESTEDMESSAGE_B18._serialized_start=847 - _LOTSNESTEDMESSAGE_B18._serialized_end=852 - _LOTSNESTEDMESSAGE_B19._serialized_start=854 - _LOTSNESTEDMESSAGE_B19._serialized_end=859 - _LOTSNESTEDMESSAGE_B20._serialized_start=861 - _LOTSNESTEDMESSAGE_B20._serialized_end=866 - _LOTSNESTEDMESSAGE_B21._serialized_start=868 - _LOTSNESTEDMESSAGE_B21._serialized_end=873 - _LOTSNESTEDMESSAGE_B22._serialized_start=875 - _LOTSNESTEDMESSAGE_B22._serialized_end=880 - _LOTSNESTEDMESSAGE_B23._serialized_start=882 - _LOTSNESTEDMESSAGE_B23._serialized_end=887 - _LOTSNESTEDMESSAGE_B24._serialized_start=889 - _LOTSNESTEDMESSAGE_B24._serialized_end=894 - _LOTSNESTEDMESSAGE_B25._serialized_start=896 - _LOTSNESTEDMESSAGE_B25._serialized_end=901 - _LOTSNESTEDMESSAGE_B26._serialized_start=903 - _LOTSNESTEDMESSAGE_B26._serialized_end=908 - _LOTSNESTEDMESSAGE_B27._serialized_start=910 - _LOTSNESTEDMESSAGE_B27._serialized_end=915 - _LOTSNESTEDMESSAGE_B28._serialized_start=917 - _LOTSNESTEDMESSAGE_B28._serialized_end=922 - _LOTSNESTEDMESSAGE_B29._serialized_start=924 - _LOTSNESTEDMESSAGE_B29._serialized_end=929 - _LOTSNESTEDMESSAGE_B30._serialized_start=931 - _LOTSNESTEDMESSAGE_B30._serialized_end=936 - _LOTSNESTEDMESSAGE_B31._serialized_start=938 - _LOTSNESTEDMESSAGE_B31._serialized_end=943 - _LOTSNESTEDMESSAGE_B32._serialized_start=945 - _LOTSNESTEDMESSAGE_B32._serialized_end=950 - _LOTSNESTEDMESSAGE_B33._serialized_start=952 - _LOTSNESTEDMESSAGE_B33._serialized_end=957 - _LOTSNESTEDMESSAGE_B34._serialized_start=959 - _LOTSNESTEDMESSAGE_B34._serialized_end=964 - _LOTSNESTEDMESSAGE_B35._serialized_start=966 - _LOTSNESTEDMESSAGE_B35._serialized_end=971 - _LOTSNESTEDMESSAGE_B36._serialized_start=973 - _LOTSNESTEDMESSAGE_B36._serialized_end=978 - _LOTSNESTEDMESSAGE_B37._serialized_start=980 - _LOTSNESTEDMESSAGE_B37._serialized_end=985 - _LOTSNESTEDMESSAGE_B38._serialized_start=987 - _LOTSNESTEDMESSAGE_B38._serialized_end=992 - _LOTSNESTEDMESSAGE_B39._serialized_start=994 - _LOTSNESTEDMESSAGE_B39._serialized_end=999 - 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 - _LOTSNESTEDMESSAGE_B40._serialized_end=1006 - _LOTSNESTEDMESSAGE_B41._serialized_start=1008 - _LOTSNESTEDMESSAGE_B41._serialized_end=1013 - _LOTSNESTEDMESSAGE_B42._serialized_start=1015 - _LOTSNESTEDMESSAGE_B42._serialized_end=1020 - _LOTSNESTEDMESSAGE_B43._serialized_start=1022 - _LOTSNESTEDMESSAGE_B43._serialized_end=1027 - _LOTSNESTEDMESSAGE_B44._serialized_start=1029 - _LOTSNESTEDMESSAGE_B44._serialized_end=1034 - _LOTSNESTEDMESSAGE_B45._serialized_start=1036 - _LOTSNESTEDMESSAGE_B45._serialized_end=1041 - _LOTSNESTEDMESSAGE_B46._serialized_start=1043 - _LOTSNESTEDMESSAGE_B46._serialized_end=1048 - _LOTSNESTEDMESSAGE_B47._serialized_start=1050 - _LOTSNESTEDMESSAGE_B47._serialized_end=1055 - _LOTSNESTEDMESSAGE_B48._serialized_start=1057 - _LOTSNESTEDMESSAGE_B48._serialized_end=1062 - _LOTSNESTEDMESSAGE_B49._serialized_start=1064 - _LOTSNESTEDMESSAGE_B49._serialized_end=1069 - _LOTSNESTEDMESSAGE_B50._serialized_start=1071 - _LOTSNESTEDMESSAGE_B50._serialized_end=1076 - _LOTSNESTEDMESSAGE_B51._serialized_start=1078 - _LOTSNESTEDMESSAGE_B51._serialized_end=1083 - _LOTSNESTEDMESSAGE_B52._serialized_start=1085 - _LOTSNESTEDMESSAGE_B52._serialized_end=1090 - _LOTSNESTEDMESSAGE_B53._serialized_start=1092 - _LOTSNESTEDMESSAGE_B53._serialized_end=1097 - _LOTSNESTEDMESSAGE_B54._serialized_start=1099 - _LOTSNESTEDMESSAGE_B54._serialized_end=1104 - _LOTSNESTEDMESSAGE_B55._serialized_start=1106 - _LOTSNESTEDMESSAGE_B55._serialized_end=1111 - _LOTSNESTEDMESSAGE_B56._serialized_start=1113 - _LOTSNESTEDMESSAGE_B56._serialized_end=1118 - _LOTSNESTEDMESSAGE_B57._serialized_start=1120 - _LOTSNESTEDMESSAGE_B57._serialized_end=1125 - _LOTSNESTEDMESSAGE_B58._serialized_start=1127 - _LOTSNESTEDMESSAGE_B58._serialized_end=1132 - _LOTSNESTEDMESSAGE_B59._serialized_start=1134 - _LOTSNESTEDMESSAGE_B59._serialized_end=1139 - _LOTSNESTEDMESSAGE_B60._serialized_start=1141 - _LOTSNESTEDMESSAGE_B60._serialized_end=1146 - _LOTSNESTEDMESSAGE_B61._serialized_start=1148 - _LOTSNESTEDMESSAGE_B61._serialized_end=1153 - _LOTSNESTEDMESSAGE_B62._serialized_start=1155 - _LOTSNESTEDMESSAGE_B62._serialized_end=1160 - _LOTSNESTEDMESSAGE_B63._serialized_start=1162 - _LOTSNESTEDMESSAGE_B63._serialized_end=1167 - _LOTSNESTEDMESSAGE_B64._serialized_start=1169 - _LOTSNESTEDMESSAGE_B64._serialized_end=1174 - _LOTSNESTEDMESSAGE_B65._serialized_start=1176 - _LOTSNESTEDMESSAGE_B65._serialized_end=1181 - _LOTSNESTEDMESSAGE_B66._serialized_start=1183 - _LOTSNESTEDMESSAGE_B66._serialized_end=1188 - _LOTSNESTEDMESSAGE_B67._serialized_start=1190 - _LOTSNESTEDMESSAGE_B67._serialized_end=1195 - _LOTSNESTEDMESSAGE_B68._serialized_start=1197 - _LOTSNESTEDMESSAGE_B68._serialized_end=1202 - _LOTSNESTEDMESSAGE_B69._serialized_start=1204 - _LOTSNESTEDMESSAGE_B69._serialized_end=1209 - _LOTSNESTEDMESSAGE_B70._serialized_start=1211 - _LOTSNESTEDMESSAGE_B70._serialized_end=1216 - _LOTSNESTEDMESSAGE_B71._serialized_start=1218 - _LOTSNESTEDMESSAGE_B71._serialized_end=1223 - _LOTSNESTEDMESSAGE_B72._serialized_start=1225 - _LOTSNESTEDMESSAGE_B72._serialized_end=1230 - _LOTSNESTEDMESSAGE_B73._serialized_start=1232 - _LOTSNESTEDMESSAGE_B73._serialized_end=1237 - _LOTSNESTEDMESSAGE_B74._serialized_start=1239 - _LOTSNESTEDMESSAGE_B74._serialized_end=1244 - _LOTSNESTEDMESSAGE_B75._serialized_start=1246 - _LOTSNESTEDMESSAGE_B75._serialized_end=1251 - _LOTSNESTEDMESSAGE_B76._serialized_start=1253 - _LOTSNESTEDMESSAGE_B76._serialized_end=1258 - _LOTSNESTEDMESSAGE_B77._serialized_start=1260 - 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 - _LOTSNESTEDMESSAGE_B78._serialized_start=1267 - _LOTSNESTEDMESSAGE_B78._serialized_end=1272 - _LOTSNESTEDMESSAGE_B79._serialized_start=1274 - _LOTSNESTEDMESSAGE_B79._serialized_end=1279 - _LOTSNESTEDMESSAGE_B80._serialized_start=1281 - _LOTSNESTEDMESSAGE_B80._serialized_end=1286 - _LOTSNESTEDMESSAGE_B81._serialized_start=1288 - _LOTSNESTEDMESSAGE_B81._serialized_end=1293 - _LOTSNESTEDMESSAGE_B82._serialized_start=1295 - _LOTSNESTEDMESSAGE_B82._serialized_end=1300 - _LOTSNESTEDMESSAGE_B83._serialized_start=1302 - _LOTSNESTEDMESSAGE_B83._serialized_end=1307 - _LOTSNESTEDMESSAGE_B84._serialized_start=1309 - _LOTSNESTEDMESSAGE_B84._serialized_end=1314 - _LOTSNESTEDMESSAGE_B85._serialized_start=1316 - _LOTSNESTEDMESSAGE_B85._serialized_end=1321 - _LOTSNESTEDMESSAGE_B86._serialized_start=1323 - _LOTSNESTEDMESSAGE_B86._serialized_end=1328 - _LOTSNESTEDMESSAGE_B87._serialized_start=1330 - _LOTSNESTEDMESSAGE_B87._serialized_end=1335 - _LOTSNESTEDMESSAGE_B88._serialized_start=1337 - _LOTSNESTEDMESSAGE_B88._serialized_end=1342 - _LOTSNESTEDMESSAGE_B89._serialized_start=1344 - _LOTSNESTEDMESSAGE_B89._serialized_end=1349 - _LOTSNESTEDMESSAGE_B90._serialized_start=1351 - _LOTSNESTEDMESSAGE_B90._serialized_end=1356 - _LOTSNESTEDMESSAGE_B91._serialized_start=1358 - _LOTSNESTEDMESSAGE_B91._serialized_end=1363 - _LOTSNESTEDMESSAGE_B92._serialized_start=1365 - _LOTSNESTEDMESSAGE_B92._serialized_end=1370 - _LOTSNESTEDMESSAGE_B93._serialized_start=1372 - _LOTSNESTEDMESSAGE_B93._serialized_end=1377 - _LOTSNESTEDMESSAGE_B94._serialized_start=1379 - _LOTSNESTEDMESSAGE_B94._serialized_end=1384 - _LOTSNESTEDMESSAGE_B95._serialized_start=1386 - _LOTSNESTEDMESSAGE_B95._serialized_end=1391 - _LOTSNESTEDMESSAGE_B96._serialized_start=1393 - _LOTSNESTEDMESSAGE_B96._serialized_end=1398 - _LOTSNESTEDMESSAGE_B97._serialized_start=1400 - _LOTSNESTEDMESSAGE_B97._serialized_end=1405 - _LOTSNESTEDMESSAGE_B98._serialized_start=1407 - _LOTSNESTEDMESSAGE_B98._serialized_end=1412 - _LOTSNESTEDMESSAGE_B99._serialized_start=1414 - _LOTSNESTEDMESSAGE_B99._serialized_end=1419 - _LOTSNESTEDMESSAGE_B100._serialized_start=1421 - _LOTSNESTEDMESSAGE_B100._serialized_end=1427 - _LOTSNESTEDMESSAGE_B101._serialized_start=1429 - _LOTSNESTEDMESSAGE_B101._serialized_end=1435 - _LOTSNESTEDMESSAGE_B102._serialized_start=1437 - _LOTSNESTEDMESSAGE_B102._serialized_end=1443 - _LOTSNESTEDMESSAGE_B103._serialized_start=1445 - _LOTSNESTEDMESSAGE_B103._serialized_end=1451 - _LOTSNESTEDMESSAGE_B104._serialized_start=1453 - _LOTSNESTEDMESSAGE_B104._serialized_end=1459 - _LOTSNESTEDMESSAGE_B105._serialized_start=1461 - _LOTSNESTEDMESSAGE_B105._serialized_end=1467 - _LOTSNESTEDMESSAGE_B106._serialized_start=1469 - _LOTSNESTEDMESSAGE_B106._serialized_end=1475 - _LOTSNESTEDMESSAGE_B107._serialized_start=1477 - _LOTSNESTEDMESSAGE_B107._serialized_end=1483 - _LOTSNESTEDMESSAGE_B108._serialized_start=1485 - _LOTSNESTEDMESSAGE_B108._serialized_end=1491 - _LOTSNESTEDMESSAGE_B109._serialized_start=1493 - _LOTSNESTEDMESSAGE_B109._serialized_end=1499 - _LOTSNESTEDMESSAGE_B110._serialized_start=1501 - _LOTSNESTEDMESSAGE_B110._serialized_end=1507 - _LOTSNESTEDMESSAGE_B111._serialized_start=1509 - _LOTSNESTEDMESSAGE_B111._serialized_end=1515 - _LOTSNESTEDMESSAGE_B112._serialized_start=1517 - _LOTSNESTEDMESSAGE_B112._serialized_end=1523 - _LOTSNESTEDMESSAGE_B113._serialized_start=1525 - _LOTSNESTEDMESSAGE_B113._serialized_end=1531 - _LOTSNESTEDMESSAGE_B114._serialized_start=1533 - _LOTSNESTEDMESSAGE_B114._serialized_end=1539 - 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 - _LOTSNESTEDMESSAGE_B115._serialized_end=1547 - _LOTSNESTEDMESSAGE_B116._serialized_start=1549 - _LOTSNESTEDMESSAGE_B116._serialized_end=1555 - _LOTSNESTEDMESSAGE_B117._serialized_start=1557 - _LOTSNESTEDMESSAGE_B117._serialized_end=1563 - _LOTSNESTEDMESSAGE_B118._serialized_start=1565 - _LOTSNESTEDMESSAGE_B118._serialized_end=1571 - _LOTSNESTEDMESSAGE_B119._serialized_start=1573 - _LOTSNESTEDMESSAGE_B119._serialized_end=1579 - _LOTSNESTEDMESSAGE_B120._serialized_start=1581 - _LOTSNESTEDMESSAGE_B120._serialized_end=1587 - _LOTSNESTEDMESSAGE_B121._serialized_start=1589 - _LOTSNESTEDMESSAGE_B121._serialized_end=1595 - _LOTSNESTEDMESSAGE_B122._serialized_start=1597 - _LOTSNESTEDMESSAGE_B122._serialized_end=1603 - _LOTSNESTEDMESSAGE_B123._serialized_start=1605 - _LOTSNESTEDMESSAGE_B123._serialized_end=1611 - _LOTSNESTEDMESSAGE_B124._serialized_start=1613 - _LOTSNESTEDMESSAGE_B124._serialized_end=1619 - _LOTSNESTEDMESSAGE_B125._serialized_start=1621 - _LOTSNESTEDMESSAGE_B125._serialized_end=1627 - _LOTSNESTEDMESSAGE_B126._serialized_start=1629 - _LOTSNESTEDMESSAGE_B126._serialized_end=1635 - _LOTSNESTEDMESSAGE_B127._serialized_start=1637 - _LOTSNESTEDMESSAGE_B127._serialized_end=1643 - _LOTSNESTEDMESSAGE_B128._serialized_start=1645 - _LOTSNESTEDMESSAGE_B128._serialized_end=1651 - _LOTSNESTEDMESSAGE_B129._serialized_start=1653 - _LOTSNESTEDMESSAGE_B129._serialized_end=1659 - _LOTSNESTEDMESSAGE_B130._serialized_start=1661 - _LOTSNESTEDMESSAGE_B130._serialized_end=1667 - _LOTSNESTEDMESSAGE_B131._serialized_start=1669 - _LOTSNESTEDMESSAGE_B131._serialized_end=1675 - _LOTSNESTEDMESSAGE_B132._serialized_start=1677 - _LOTSNESTEDMESSAGE_B132._serialized_end=1683 - _LOTSNESTEDMESSAGE_B133._serialized_start=1685 - _LOTSNESTEDMESSAGE_B133._serialized_end=1691 - _LOTSNESTEDMESSAGE_B134._serialized_start=1693 - _LOTSNESTEDMESSAGE_B134._serialized_end=1699 - _LOTSNESTEDMESSAGE_B135._serialized_start=1701 - _LOTSNESTEDMESSAGE_B135._serialized_end=1707 - _LOTSNESTEDMESSAGE_B136._serialized_start=1709 - _LOTSNESTEDMESSAGE_B136._serialized_end=1715 - _LOTSNESTEDMESSAGE_B137._serialized_start=1717 - _LOTSNESTEDMESSAGE_B137._serialized_end=1723 - _LOTSNESTEDMESSAGE_B138._serialized_start=1725 - _LOTSNESTEDMESSAGE_B138._serialized_end=1731 - _LOTSNESTEDMESSAGE_B139._serialized_start=1733 - _LOTSNESTEDMESSAGE_B139._serialized_end=1739 - _LOTSNESTEDMESSAGE_B140._serialized_start=1741 - _LOTSNESTEDMESSAGE_B140._serialized_end=1747 - _LOTSNESTEDMESSAGE_B141._serialized_start=1749 - _LOTSNESTEDMESSAGE_B141._serialized_end=1755 - _LOTSNESTEDMESSAGE_B142._serialized_start=1757 - _LOTSNESTEDMESSAGE_B142._serialized_end=1763 - _LOTSNESTEDMESSAGE_B143._serialized_start=1765 - _LOTSNESTEDMESSAGE_B143._serialized_end=1771 - _LOTSNESTEDMESSAGE_B144._serialized_start=1773 - _LOTSNESTEDMESSAGE_B144._serialized_end=1779 - _LOTSNESTEDMESSAGE_B145._serialized_start=1781 - _LOTSNESTEDMESSAGE_B145._serialized_end=1787 - _LOTSNESTEDMESSAGE_B146._serialized_start=1789 - _LOTSNESTEDMESSAGE_B146._serialized_end=1795 - _LOTSNESTEDMESSAGE_B147._serialized_start=1797 - _LOTSNESTEDMESSAGE_B147._serialized_end=1803 - _LOTSNESTEDMESSAGE_B148._serialized_start=1805 - _LOTSNESTEDMESSAGE_B148._serialized_end=1811 - _LOTSNESTEDMESSAGE_B149._serialized_start=1813 - _LOTSNESTEDMESSAGE_B149._serialized_end=1819 - _LOTSNESTEDMESSAGE_B150._serialized_start=1821 - _LOTSNESTEDMESSAGE_B150._serialized_end=1827 - _LOTSNESTEDMESSAGE_B151._serialized_start=1829 - _LOTSNESTEDMESSAGE_B151._serialized_end=1835 - 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 - _LOTSNESTEDMESSAGE_B152._serialized_end=1843 - _LOTSNESTEDMESSAGE_B153._serialized_start=1845 - _LOTSNESTEDMESSAGE_B153._serialized_end=1851 - _LOTSNESTEDMESSAGE_B154._serialized_start=1853 - _LOTSNESTEDMESSAGE_B154._serialized_end=1859 - _LOTSNESTEDMESSAGE_B155._serialized_start=1861 - _LOTSNESTEDMESSAGE_B155._serialized_end=1867 - _LOTSNESTEDMESSAGE_B156._serialized_start=1869 - _LOTSNESTEDMESSAGE_B156._serialized_end=1875 - _LOTSNESTEDMESSAGE_B157._serialized_start=1877 - _LOTSNESTEDMESSAGE_B157._serialized_end=1883 - _LOTSNESTEDMESSAGE_B158._serialized_start=1885 - _LOTSNESTEDMESSAGE_B158._serialized_end=1891 - _LOTSNESTEDMESSAGE_B159._serialized_start=1893 - _LOTSNESTEDMESSAGE_B159._serialized_end=1899 - _LOTSNESTEDMESSAGE_B160._serialized_start=1901 - _LOTSNESTEDMESSAGE_B160._serialized_end=1907 - _LOTSNESTEDMESSAGE_B161._serialized_start=1909 - _LOTSNESTEDMESSAGE_B161._serialized_end=1915 - _LOTSNESTEDMESSAGE_B162._serialized_start=1917 - _LOTSNESTEDMESSAGE_B162._serialized_end=1923 - _LOTSNESTEDMESSAGE_B163._serialized_start=1925 - _LOTSNESTEDMESSAGE_B163._serialized_end=1931 - _LOTSNESTEDMESSAGE_B164._serialized_start=1933 - _LOTSNESTEDMESSAGE_B164._serialized_end=1939 - _LOTSNESTEDMESSAGE_B165._serialized_start=1941 - _LOTSNESTEDMESSAGE_B165._serialized_end=1947 - _LOTSNESTEDMESSAGE_B166._serialized_start=1949 - _LOTSNESTEDMESSAGE_B166._serialized_end=1955 - _LOTSNESTEDMESSAGE_B167._serialized_start=1957 - _LOTSNESTEDMESSAGE_B167._serialized_end=1963 - _LOTSNESTEDMESSAGE_B168._serialized_start=1965 - _LOTSNESTEDMESSAGE_B168._serialized_end=1971 - _LOTSNESTEDMESSAGE_B169._serialized_start=1973 - _LOTSNESTEDMESSAGE_B169._serialized_end=1979 - _LOTSNESTEDMESSAGE_B170._serialized_start=1981 - _LOTSNESTEDMESSAGE_B170._serialized_end=1987 - _LOTSNESTEDMESSAGE_B171._serialized_start=1989 - _LOTSNESTEDMESSAGE_B171._serialized_end=1995 - _LOTSNESTEDMESSAGE_B172._serialized_start=1997 - _LOTSNESTEDMESSAGE_B172._serialized_end=2003 - _LOTSNESTEDMESSAGE_B173._serialized_start=2005 - _LOTSNESTEDMESSAGE_B173._serialized_end=2011 - _LOTSNESTEDMESSAGE_B174._serialized_start=2013 - _LOTSNESTEDMESSAGE_B174._serialized_end=2019 - _LOTSNESTEDMESSAGE_B175._serialized_start=2021 - _LOTSNESTEDMESSAGE_B175._serialized_end=2027 - _LOTSNESTEDMESSAGE_B176._serialized_start=2029 - _LOTSNESTEDMESSAGE_B176._serialized_end=2035 - _LOTSNESTEDMESSAGE_B177._serialized_start=2037 - _LOTSNESTEDMESSAGE_B177._serialized_end=2043 - _LOTSNESTEDMESSAGE_B178._serialized_start=2045 - _LOTSNESTEDMESSAGE_B178._serialized_end=2051 - _LOTSNESTEDMESSAGE_B179._serialized_start=2053 - _LOTSNESTEDMESSAGE_B179._serialized_end=2059 - _LOTSNESTEDMESSAGE_B180._serialized_start=2061 - _LOTSNESTEDMESSAGE_B180._serialized_end=2067 - _LOTSNESTEDMESSAGE_B181._serialized_start=2069 - _LOTSNESTEDMESSAGE_B181._serialized_end=2075 - _LOTSNESTEDMESSAGE_B182._serialized_start=2077 - _LOTSNESTEDMESSAGE_B182._serialized_end=2083 - _LOTSNESTEDMESSAGE_B183._serialized_start=2085 - _LOTSNESTEDMESSAGE_B183._serialized_end=2091 - _LOTSNESTEDMESSAGE_B184._serialized_start=2093 - _LOTSNESTEDMESSAGE_B184._serialized_end=2099 - _LOTSNESTEDMESSAGE_B185._serialized_start=2101 - _LOTSNESTEDMESSAGE_B185._serialized_end=2107 - _LOTSNESTEDMESSAGE_B186._serialized_start=2109 - _LOTSNESTEDMESSAGE_B186._serialized_end=2115 - _LOTSNESTEDMESSAGE_B187._serialized_start=2117 - _LOTSNESTEDMESSAGE_B187._serialized_end=2123 - _LOTSNESTEDMESSAGE_B188._serialized_start=2125 - _LOTSNESTEDMESSAGE_B188._serialized_end=2131 - 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 - _LOTSNESTEDMESSAGE_B189._serialized_end=2139 - _LOTSNESTEDMESSAGE_B190._serialized_start=2141 - _LOTSNESTEDMESSAGE_B190._serialized_end=2147 - _LOTSNESTEDMESSAGE_B191._serialized_start=2149 - _LOTSNESTEDMESSAGE_B191._serialized_end=2155 - _LOTSNESTEDMESSAGE_B192._serialized_start=2157 - _LOTSNESTEDMESSAGE_B192._serialized_end=2163 - _LOTSNESTEDMESSAGE_B193._serialized_start=2165 - _LOTSNESTEDMESSAGE_B193._serialized_end=2171 - _LOTSNESTEDMESSAGE_B194._serialized_start=2173 - _LOTSNESTEDMESSAGE_B194._serialized_end=2179 - _LOTSNESTEDMESSAGE_B195._serialized_start=2181 - _LOTSNESTEDMESSAGE_B195._serialized_end=2187 - _LOTSNESTEDMESSAGE_B196._serialized_start=2189 - _LOTSNESTEDMESSAGE_B196._serialized_end=2195 - _LOTSNESTEDMESSAGE_B197._serialized_start=2197 - _LOTSNESTEDMESSAGE_B197._serialized_end=2203 - _LOTSNESTEDMESSAGE_B198._serialized_start=2205 - _LOTSNESTEDMESSAGE_B198._serialized_end=2211 - _LOTSNESTEDMESSAGE_B199._serialized_start=2213 - _LOTSNESTEDMESSAGE_B199._serialized_end=2219 - _LOTSNESTEDMESSAGE_B200._serialized_start=2221 - _LOTSNESTEDMESSAGE_B200._serialized_end=2227 - _LOTSNESTEDMESSAGE_B201._serialized_start=2229 - _LOTSNESTEDMESSAGE_B201._serialized_end=2235 - _LOTSNESTEDMESSAGE_B202._serialized_start=2237 - _LOTSNESTEDMESSAGE_B202._serialized_end=2243 - _LOTSNESTEDMESSAGE_B203._serialized_start=2245 - _LOTSNESTEDMESSAGE_B203._serialized_end=2251 - _LOTSNESTEDMESSAGE_B204._serialized_start=2253 - _LOTSNESTEDMESSAGE_B204._serialized_end=2259 - _LOTSNESTEDMESSAGE_B205._serialized_start=2261 - _LOTSNESTEDMESSAGE_B205._serialized_end=2267 - _LOTSNESTEDMESSAGE_B206._serialized_start=2269 - _LOTSNESTEDMESSAGE_B206._serialized_end=2275 - _LOTSNESTEDMESSAGE_B207._serialized_start=2277 - _LOTSNESTEDMESSAGE_B207._serialized_end=2283 - _LOTSNESTEDMESSAGE_B208._serialized_start=2285 - _LOTSNESTEDMESSAGE_B208._serialized_end=2291 - _LOTSNESTEDMESSAGE_B209._serialized_start=2293 - _LOTSNESTEDMESSAGE_B209._serialized_end=2299 - _LOTSNESTEDMESSAGE_B210._serialized_start=2301 - _LOTSNESTEDMESSAGE_B210._serialized_end=2307 - _LOTSNESTEDMESSAGE_B211._serialized_start=2309 - _LOTSNESTEDMESSAGE_B211._serialized_end=2315 - _LOTSNESTEDMESSAGE_B212._serialized_start=2317 - _LOTSNESTEDMESSAGE_B212._serialized_end=2323 - _LOTSNESTEDMESSAGE_B213._serialized_start=2325 - _LOTSNESTEDMESSAGE_B213._serialized_end=2331 - _LOTSNESTEDMESSAGE_B214._serialized_start=2333 - _LOTSNESTEDMESSAGE_B214._serialized_end=2339 - _LOTSNESTEDMESSAGE_B215._serialized_start=2341 - _LOTSNESTEDMESSAGE_B215._serialized_end=2347 - _LOTSNESTEDMESSAGE_B216._serialized_start=2349 - _LOTSNESTEDMESSAGE_B216._serialized_end=2355 - _LOTSNESTEDMESSAGE_B217._serialized_start=2357 - _LOTSNESTEDMESSAGE_B217._serialized_end=2363 - _LOTSNESTEDMESSAGE_B218._serialized_start=2365 - _LOTSNESTEDMESSAGE_B218._serialized_end=2371 - _LOTSNESTEDMESSAGE_B219._serialized_start=2373 - _LOTSNESTEDMESSAGE_B219._serialized_end=2379 - _LOTSNESTEDMESSAGE_B220._serialized_start=2381 - _LOTSNESTEDMESSAGE_B220._serialized_end=2387 - _LOTSNESTEDMESSAGE_B221._serialized_start=2389 - _LOTSNESTEDMESSAGE_B221._serialized_end=2395 - _LOTSNESTEDMESSAGE_B222._serialized_start=2397 - _LOTSNESTEDMESSAGE_B222._serialized_end=2403 - _LOTSNESTEDMESSAGE_B223._serialized_start=2405 - _LOTSNESTEDMESSAGE_B223._serialized_end=2411 - _LOTSNESTEDMESSAGE_B224._serialized_start=2413 - _LOTSNESTEDMESSAGE_B224._serialized_end=2419 - _LOTSNESTEDMESSAGE_B225._serialized_start=2421 - _LOTSNESTEDMESSAGE_B225._serialized_end=2427 - 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 - _LOTSNESTEDMESSAGE_B226._serialized_end=2435 - _LOTSNESTEDMESSAGE_B227._serialized_start=2437 - _LOTSNESTEDMESSAGE_B227._serialized_end=2443 - _LOTSNESTEDMESSAGE_B228._serialized_start=2445 - _LOTSNESTEDMESSAGE_B228._serialized_end=2451 - _LOTSNESTEDMESSAGE_B229._serialized_start=2453 - _LOTSNESTEDMESSAGE_B229._serialized_end=2459 - _LOTSNESTEDMESSAGE_B230._serialized_start=2461 - _LOTSNESTEDMESSAGE_B230._serialized_end=2467 - _LOTSNESTEDMESSAGE_B231._serialized_start=2469 - _LOTSNESTEDMESSAGE_B231._serialized_end=2475 - _LOTSNESTEDMESSAGE_B232._serialized_start=2477 - _LOTSNESTEDMESSAGE_B232._serialized_end=2483 - _LOTSNESTEDMESSAGE_B233._serialized_start=2485 - _LOTSNESTEDMESSAGE_B233._serialized_end=2491 - _LOTSNESTEDMESSAGE_B234._serialized_start=2493 - _LOTSNESTEDMESSAGE_B234._serialized_end=2499 - _LOTSNESTEDMESSAGE_B235._serialized_start=2501 - _LOTSNESTEDMESSAGE_B235._serialized_end=2507 - _LOTSNESTEDMESSAGE_B236._serialized_start=2509 - _LOTSNESTEDMESSAGE_B236._serialized_end=2515 - _LOTSNESTEDMESSAGE_B237._serialized_start=2517 - _LOTSNESTEDMESSAGE_B237._serialized_end=2523 - _LOTSNESTEDMESSAGE_B238._serialized_start=2525 - _LOTSNESTEDMESSAGE_B238._serialized_end=2531 - _LOTSNESTEDMESSAGE_B239._serialized_start=2533 - _LOTSNESTEDMESSAGE_B239._serialized_end=2539 - _LOTSNESTEDMESSAGE_B240._serialized_start=2541 - _LOTSNESTEDMESSAGE_B240._serialized_end=2547 - _LOTSNESTEDMESSAGE_B241._serialized_start=2549 - _LOTSNESTEDMESSAGE_B241._serialized_end=2555 - _LOTSNESTEDMESSAGE_B242._serialized_start=2557 - _LOTSNESTEDMESSAGE_B242._serialized_end=2563 - _LOTSNESTEDMESSAGE_B243._serialized_start=2565 - _LOTSNESTEDMESSAGE_B243._serialized_end=2571 - _LOTSNESTEDMESSAGE_B244._serialized_start=2573 - _LOTSNESTEDMESSAGE_B244._serialized_end=2579 - _LOTSNESTEDMESSAGE_B245._serialized_start=2581 - _LOTSNESTEDMESSAGE_B245._serialized_end=2587 - _LOTSNESTEDMESSAGE_B246._serialized_start=2589 - _LOTSNESTEDMESSAGE_B246._serialized_end=2595 - _LOTSNESTEDMESSAGE_B247._serialized_start=2597 - _LOTSNESTEDMESSAGE_B247._serialized_end=2603 - _LOTSNESTEDMESSAGE_B248._serialized_start=2605 - _LOTSNESTEDMESSAGE_B248._serialized_end=2611 - _LOTSNESTEDMESSAGE_B249._serialized_start=2613 - _LOTSNESTEDMESSAGE_B249._serialized_end=2619 - _LOTSNESTEDMESSAGE_B250._serialized_start=2621 - _LOTSNESTEDMESSAGE_B250._serialized_end=2627 - _LOTSNESTEDMESSAGE_B251._serialized_start=2629 - _LOTSNESTEDMESSAGE_B251._serialized_end=2635 - _LOTSNESTEDMESSAGE_B252._serialized_start=2637 - _LOTSNESTEDMESSAGE_B252._serialized_end=2643 - _LOTSNESTEDMESSAGE_B253._serialized_start=2645 - _LOTSNESTEDMESSAGE_B253._serialized_end=2651 - _LOTSNESTEDMESSAGE_B254._serialized_start=2653 - _LOTSNESTEDMESSAGE_B254._serialized_end=2659 - _LOTSNESTEDMESSAGE_B255._serialized_start=2661 - _LOTSNESTEDMESSAGE_B255._serialized_end=2667 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py deleted file mode 100644 index d46dee080a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/no_package.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _NOPACKAGEENUM._serialized_start=106 - _NOPACKAGEENUM._serialized_end=169 - _NOPACKAGEMESSAGE._serialized_start=45 - _NOPACKAGEMESSAGE._serialized_end=104 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py deleted file mode 100644 index 2921d5cb6e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py +++ /dev/null @@ -1,1539 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. -# -# TODO(robinson): Helpers for verbose, common checks like seeing if a -# descriptor's cpp_type is CPPTYPE_MESSAGE. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. 
-(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -from io import BytesIO -import struct -import sys -import weakref - -# We use "as" to avoid name collisions with variables. -from google.protobuf.internal import api_implementation -from google.protobuf.internal import containers -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import enum_type_wrapper -from google.protobuf.internal import extension_dict -from google.protobuf.internal import message_listener as message_listener_mod -from google.protobuf.internal import type_checkers -from google.protobuf.internal import well_known_types -from google.protobuf.internal import wire_format -from google.protobuf import descriptor as descriptor_mod -from google.protobuf import message as message_mod -from google.protobuf import text_format - -_FieldDescriptor = descriptor_mod.FieldDescriptor -_AnyFullTypeName = 'google.protobuf.Any' -_ExtensionDict = extension_dict._ExtensionDict - -class GeneratedProtocolMessageType(type): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - We add implementations for all methods described in the Message class. We - also create properties to allow getting/setting all fields in the protocol - message. Finally, we create slots to prevent users from accidentally - "setting" nonexistent fields in the protocol message, which then wouldn't get - serialized / deserialized properly. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __new__(cls, name, bases, dictionary): - """Custom allocation for runtime-generated class types. - - We override __new__ because this is apparently the only place - where we can meaningfully set __slots__ on the class we're creating(?). - (The interplay between metaclasses and slots is not very well-documented). - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - - Returns: - Newly-allocated class. - - Raises: - RuntimeError: Generated code only work with python cpp extension. 
- """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - if isinstance(descriptor, str): - raise RuntimeError('The generated code only work with python cpp ' - 'extension, but it is using pure python runtime.') - - # If a concrete class already exists for this descriptor, don't try to - # create another. Doing so will break any messages that already exist with - # the existing class. - # - # The C++ implementation appears to have its own internal `PyMessageFactory` - # to achieve similar results. - # - # This most commonly happens in `text_format.py` when using descriptors from - # a custom pool; it calls symbol_database.Global().getPrototype() on a - # descriptor which already has an existing concrete class. - new_class = getattr(descriptor, '_concrete_class', None) - if new_class: - return new_class - - if descriptor.full_name in well_known_types.WKTBASES: - bases += (well_known_types.WKTBASES[descriptor.full_name],) - _AddClassAttributesForNestedExtensions(descriptor, dictionary) - _AddSlots(descriptor, dictionary) - - superclass = super(GeneratedProtocolMessageType, cls) - new_class = superclass.__new__(cls, name, bases, dictionary) - return new_class - - def __init__(cls, name, bases, dictionary): - """Here we perform the majority of our work on the class. - We add enum getters, an __init__ method, implementations - of all Message methods, and properties for all fields - in the protocol type. - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - # If this is an _existing_ class looked up via `_concrete_class` in the - # __new__ method above, then we don't need to re-initialize anything. - existing_class = getattr(descriptor, '_concrete_class', None) - if existing_class: - assert existing_class is cls, ( - 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' - % (descriptor.full_name)) - return - - cls._decoders_by_tag = {} - if (descriptor.has_options and - descriptor.GetOptions().message_set_wire_format): - cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( - decoder.MessageSetItemDecoder(descriptor), None) - - # Attach stuff to each FieldDescriptor for quick lookup later on. - for field in descriptor.fields: - _AttachFieldHelpers(cls, field) - - descriptor._concrete_class = cls # pylint: disable=protected-access - _AddEnumValues(descriptor, cls) - _AddInitMethod(descriptor, cls) - _AddPropertiesForFields(descriptor, cls) - _AddPropertiesForExtensions(descriptor, cls) - _AddStaticMethods(cls) - _AddMessageMethods(descriptor, cls) - _AddPrivateHelperMethods(descriptor, cls) - - superclass = super(GeneratedProtocolMessageType, cls) - superclass.__init__(name, bases, dictionary) - - -# Stateless helpers for GeneratedProtocolMessageType below. -# Outside clients should not access these directly. -# -# I opted not to make any of these methods on the metaclass, to make it more -# clear that I'm not really using any state there and to keep clients from -# thinking that they have direct access to these construction helpers. 
- - -def _PropertyName(proto_field_name): - """Returns the name of the public property attribute which - clients can use to get and (in some cases) set the value - of a protocol message field. - - Args: - proto_field_name: The protocol message field name, exactly - as it appears (or would appear) in a .proto file. - """ - # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. - # nnorwitz makes my day by writing: - # """ - # FYI. See the keyword module in the stdlib. This could be as simple as: - # - # if keyword.iskeyword(proto_field_name): - # return proto_field_name + "_" - # return proto_field_name - # """ - # Kenton says: The above is a BAD IDEA. People rely on being able to use - # getattr() and setattr() to reflectively manipulate field values. If we - # rename the properties, then every such user has to also make sure to apply - # the same transformation. Note that currently if you name a field "yield", - # you can still access it just fine using getattr/setattr -- it's not even - # that cumbersome to do so. - # TODO(kenton): Remove this method entirely if/when everyone agrees with my - # position. - return proto_field_name - - -def _AddSlots(message_descriptor, dictionary): - """Adds a __slots__ entry to dictionary, containing the names of all valid - attributes for this message type. - - Args: - message_descriptor: A Descriptor instance describing this message type. - dictionary: Class dictionary to which we'll add a '__slots__' entry. - """ - dictionary['__slots__'] = ['_cached_byte_size', - '_cached_byte_size_dirty', - '_fields', - '_unknown_fields', - '_unknown_field_set', - '_is_present_in_parent', - '_listener', - '_listener_for_children', - '__weakref__', - '_oneofs'] - - -def _IsMessageSetExtension(field): - return (field.is_extension and - field.containing_type.has_options and - field.containing_type.GetOptions().message_set_wire_format and - field.type == _FieldDescriptor.TYPE_MESSAGE and - field.label == _FieldDescriptor.LABEL_OPTIONAL) - - -def _IsMapField(field): - return (field.type == _FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def _IsMessageMapField(field): - value_type = field.message_type.fields_by_name['value'] - return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE - - -def _AttachFieldHelpers(cls, field_descriptor): - is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) - is_packable = (is_repeated and - wire_format.IsTypePackable(field_descriptor.type)) - is_proto3 = field_descriptor.containing_type.syntax == 'proto3' - if not is_packable: - is_packed = False - elif field_descriptor.containing_type.syntax == 'proto2': - is_packed = (field_descriptor.has_options and - field_descriptor.GetOptions().packed) - else: - has_packed_false = (field_descriptor.has_options and - field_descriptor.GetOptions().HasField('packed') and - field_descriptor.GetOptions().packed == False) - is_packed = not has_packed_false - is_map_entry = _IsMapField(field_descriptor) - - if is_map_entry: - field_encoder = encoder.MapEncoder(field_descriptor) - sizer = encoder.MapSizer(field_descriptor, - _IsMessageMapField(field_descriptor)) - elif _IsMessageSetExtension(field_descriptor): - field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) - sizer = encoder.MessageSetItemSizer(field_descriptor.number) - else: - field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - sizer = 
type_checkers.TYPE_TO_SIZER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - - field_descriptor._encoder = field_encoder - field_descriptor._sizer = sizer - field_descriptor._default_constructor = _DefaultValueConstructorForField( - field_descriptor) - - def AddDecoder(wiretype, is_packed): - tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) - decode_type = field_descriptor.type - if (decode_type == _FieldDescriptor.TYPE_ENUM and - type_checkers.SupportsOpenEnums(field_descriptor)): - decode_type = _FieldDescriptor.TYPE_INT32 - - oneof_descriptor = None - clear_if_default = False - if field_descriptor.containing_oneof is not None: - oneof_descriptor = field_descriptor - elif (is_proto3 and not is_repeated and - field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): - clear_if_default = True - - if is_map_entry: - is_message_map = _IsMessageMapField(field_descriptor) - - field_decoder = decoder.MapDecoder( - field_descriptor, _GetInitializeDefaultForMap(field_descriptor), - is_message_map) - elif decode_type == _FieldDescriptor.TYPE_STRING: - field_decoder = decoder.StringDecoder( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor) - else: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - # pylint: disable=protected-access - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - - cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) - - AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], - False) - - if is_repeated and wire_format.IsTypePackable(field_descriptor.type): - # To support wire compatibility of adding packed = true, add a decoder for - # packed values regardless of the field's options. - AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) - - -def _AddClassAttributesForNestedExtensions(descriptor, dictionary): - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - assert extension_name not in dictionary - dictionary[extension_name] = extension_field - - -def _AddEnumValues(descriptor, cls): - """Sets class-level attributes for all enum fields defined in this message. - - Also exporting a class-level object that can name enum values. - - Args: - descriptor: Descriptor object for this message type. - cls: Class we're constructing for this message type. 
- """ - for enum_type in descriptor.enum_types: - setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) - for enum_value in enum_type.values: - setattr(cls, enum_value.name, enum_value.number) - - -def _GetInitializeDefaultForMap(field): - if field.label != _FieldDescriptor.LABEL_REPEATED: - raise ValueError('map_entry set on non-repeated field %s' % ( - field.name)) - fields_by_name = field.message_type.fields_by_name - key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) - - value_field = fields_by_name['value'] - if _IsMessageMapField(field): - def MakeMessageMapDefault(message): - return containers.MessageMap( - message._listener_for_children, value_field.message_type, key_checker, - field.message_type) - return MakeMessageMapDefault - else: - value_checker = type_checkers.GetTypeChecker(value_field) - def MakePrimitiveMapDefault(message): - return containers.ScalarMap( - message._listener_for_children, key_checker, value_checker, - field.message_type) - return MakePrimitiveMapDefault - -def _DefaultValueConstructorForField(field): - """Returns a function which returns a default value for a field. - - Args: - field: FieldDescriptor object for this field. - - The returned function has one argument: - message: Message instance containing this field, or a weakref proxy - of same. - - That function in turn returns a default value for this field. The default - value may refer back to |message| via a weak reference. - """ - - if _IsMapField(field): - return _GetInitializeDefaultForMap(field) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - if field.has_default_value and field.default_value != []: - raise ValueError('Repeated field default value not empty list: %s' % ( - field.default_value)) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # We can't look at _concrete_class yet since it might not have - # been set. (Depends on order in which we initialize the classes). - message_type = field.message_type - def MakeRepeatedMessageDefault(message): - return containers.RepeatedCompositeFieldContainer( - message._listener_for_children, field.message_type) - return MakeRepeatedMessageDefault - else: - type_checker = type_checkers.GetTypeChecker(field) - def MakeRepeatedScalarDefault(message): - return containers.RepeatedScalarFieldContainer( - message._listener_for_children, type_checker) - return MakeRepeatedScalarDefault - - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # _concrete_class may not yet be initialized. - message_type = field.message_type - def MakeSubMessageDefault(message): - assert getattr(message_type, '_concrete_class', None), ( - 'Uninitialized concrete class found for field %r (message type %r)' - % (field.full_name, message_type.full_name)) - result = message_type._concrete_class() - result._SetListener( - _OneofListener(message, field) - if field.containing_oneof is not None - else message._listener_for_children) - return result - return MakeSubMessageDefault - - def MakeScalarDefault(message): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. 
- return field.default_value - return MakeScalarDefault - - -def _ReraiseTypeErrorWithFieldName(message_name, field_name): - """Re-raise the currently-handled TypeError with the field name added.""" - exc = sys.exc_info()[1] - if len(exc.args) == 1 and type(exc) is TypeError: - # simple TypeError; add field name to exception message - exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) - - # re-raise possibly-amended exception with original traceback: - raise exc.with_traceback(sys.exc_info()[2]) - - -def _AddInitMethod(message_descriptor, cls): - """Adds an __init__ method to cls.""" - - def _GetIntegerEnumValue(enum_type, value): - """Convert a string or integer enum value to an integer. - - If the value is a string, it is converted to the enum value in - enum_type with the same name. If the value is not a string, it's - returned as-is. (No conversion or bounds-checking is done.) - """ - if isinstance(value, str): - try: - return enum_type.values_by_name[value].number - except KeyError: - raise ValueError('Enum type %s: unknown label "%s"' % ( - enum_type.full_name, value)) - return value - - def init(self, **kwargs): - self._cached_byte_size = 0 - self._cached_byte_size_dirty = len(kwargs) > 0 - self._fields = {} - # Contains a mapping from oneof field descriptors to the descriptor - # of the currently set field in that oneof field. - self._oneofs = {} - - # _unknown_fields is () when empty for efficiency, and will be turned into - # a list if fields are added. - self._unknown_fields = () - # _unknown_field_set is None when empty for efficiency, and will be - # turned into UnknownFieldSet struct if fields are added. - self._unknown_field_set = None # pylint: disable=protected-access - self._is_present_in_parent = False - self._listener = message_listener_mod.NullMessageListener() - self._listener_for_children = _Listener(self) - for field_name, field_value in kwargs.items(): - field = _GetFieldByName(message_descriptor, field_name) - if field is None: - raise TypeError('%s() got an unexpected keyword argument "%s"' % - (message_descriptor.name, field_name)) - if field_value is None: - # field=None is the same as no field at all. 
- continue - if field.label == _FieldDescriptor.LABEL_REPEATED: - copy = field._default_constructor(self) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite - if _IsMapField(field): - if _IsMessageMapField(field): - for key in field_value: - copy[key].MergeFrom(field_value[key]) - else: - copy.update(field_value) - else: - for val in field_value: - if isinstance(val, dict): - copy.add(**val) - else: - copy.add().MergeFrom(val) - else: # Scalar - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = [_GetIntegerEnumValue(field.enum_type, val) - for val in field_value] - copy.extend(field_value) - self._fields[field] = copy - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - copy = field._default_constructor(self) - new_val = field_value - if isinstance(field_value, dict): - new_val = field.message_type._concrete_class(**field_value) - try: - copy.MergeFrom(new_val) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - self._fields[field] = copy - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = _GetIntegerEnumValue(field.enum_type, field_value) - try: - setattr(self, field_name, field_value) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - - init.__module__ = None - init.__doc__ = None - cls.__init__ = init - - -def _GetFieldByName(message_descriptor, field_name): - """Returns a field descriptor by field name. - - Args: - message_descriptor: A Descriptor describing all fields in message. - field_name: The name of the field to retrieve. - Returns: - The field descriptor associated with the field name. - """ - try: - return message_descriptor.fields_by_name[field_name] - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - -def _AddPropertiesForFields(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - for field in descriptor.fields: - _AddPropertiesForField(field, cls) - - if descriptor.is_extendable: - # _ExtensionDict is just an adaptor with no state so we allocate a new one - # every time it is accessed. - cls.Extensions = property(lambda self: _ExtensionDict(self)) - - -def _AddPropertiesForField(field, cls): - """Adds a public property for a protocol message field. - Clients can use this property to get and (in the case - of non-repeated scalar fields) directly set the value - of a protocol message field. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # Catch it if we add other types that we should - # handle specially here. - assert _FieldDescriptor.MAX_CPPTYPE == 10 - - constant_name = field.name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, field.number) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - _AddPropertiesForRepeatedField(field, cls) - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - _AddPropertiesForNonRepeatedCompositeField(field, cls) - else: - _AddPropertiesForNonRepeatedScalarField(field, cls) - - -class _FieldProperty(property): - __slots__ = ('DESCRIPTOR',) - - def __init__(self, descriptor, getter, setter, doc): - property.__init__(self, getter, setter, doc=doc) - self.DESCRIPTOR = descriptor - - -def _AddPropertiesForRepeatedField(field, cls): - """Adds a public property for a "repeated" protocol message field. 
Clients - can use this property to get the value of the field, which will be either a - RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see - below). - - Note that when clients add values to these containers, we perform - type-checking in the case of repeated scalar fields, and we also set any - necessary "has" bits as a side-effect. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to repeated field ' - '"%s" in protocol message object.' % proto_field_name) - - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedScalarField(field, cls): - """Adds a public property for a nonrepeated, scalar protocol message field. - Clients can use this property to get and directly set the value of the field. - Note that when the client sets the value of a field by using this property, - all necessary "has" bits are set as a side-effect, and we also perform - type-checking. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - type_checker = type_checkers.GetTypeChecker(field) - default_value = field.default_value - is_proto3 = field.containing_type.syntax == 'proto3' - - def getter(self): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. - return self._fields.get(field, default_value) - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - clear_when_set_to_default = is_proto3 and not field.containing_oneof - - def field_setter(self, new_value): - # pylint: disable=protected-access - # Testing the value for truthiness captures all of the proto3 defaults - # (0, 0.0, enum 0, and False). - try: - new_value = type_checker.CheckValue(new_value) - except TypeError as e: - raise TypeError( - 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) - if clear_when_set_to_default and not new_value: - self._fields.pop(field, None) - else: - self._fields[field] = new_value - # Check _cached_byte_size_dirty inline to improve performance, since scalar - # setters are called frequently. 
- if not self._cached_byte_size_dirty: - self._Modified() - - if field.containing_oneof: - def setter(self, new_value): - field_setter(self, new_value) - self._UpdateOneofState(field) - else: - setter = field_setter - - setter.__module__ = None - setter.__doc__ = 'Setter for %s.' % proto_field_name - - # Add a property to encapsulate the getter/setter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedCompositeField(field, cls): - """Adds a public property for a nonrepeated, composite protocol message field. - A composite field is a "group" or "message" field. - - Clients can use this property to get the value of the field, but cannot - assign to the property directly. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # TODO(robinson): Remove duplication with similar method - # for non-repeated scalars. - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to composite field ' - '"%s" in protocol message object.' % proto_field_name) - - # Add a property to encapsulate the getter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForExtensions(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - constant_name = extension_name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, extension_field.number) - - # TODO(amauryfa): Migrate all users of these attributes to functions like - # pool.FindExtensionByNumber(descriptor). - if descriptor.file is not None: - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. - pool = descriptor.file.pool - cls._extensions_by_number = pool._extensions_by_number[descriptor] - cls._extensions_by_name = pool._extensions_by_name[descriptor] - -def _AddStaticMethods(cls): - # TODO(robinson): This probably needs to be thread-safe(?) - def RegisterExtension(extension_handle): - extension_handle.containing_type = cls.DESCRIPTOR - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
- # pylint: disable=protected-access - cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) - _AttachFieldHelpers(cls, extension_handle) - cls.RegisterExtension = staticmethod(RegisterExtension) - - def FromString(s): - message = cls() - message.MergeFromString(s) - return message - cls.FromString = staticmethod(FromString) - - -def _IsPresent(item): - """Given a (FieldDescriptor, value) tuple from _fields, return true if the - value should be included in the list returned by ListFields().""" - - if item[0].label == _FieldDescriptor.LABEL_REPEATED: - return bool(item[1]) - elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - return item[1]._is_present_in_parent - else: - return True - - -def _AddListFieldsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ListFields(self): - all_fields = [item for item in self._fields.items() if _IsPresent(item)] - all_fields.sort(key = lambda item: item[0].number) - return all_fields - - cls.ListFields = ListFields - -_PROTO3_ERROR_TEMPLATE = \ - ('Protocol message %s has no non-repeated submessage field "%s" ' - 'nor marked as optional') -_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' - -def _AddHasFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - is_proto3 = (message_descriptor.syntax == "proto3") - error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE - - hassable_fields = {} - for field in message_descriptor.fields: - if field.label == _FieldDescriptor.LABEL_REPEATED: - continue - # For proto3, only submessages and fields inside a oneof have presence. - if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and - not field.containing_oneof): - continue - hassable_fields[field.name] = field - - # Has methods are supported for oneof descriptors. - for oneof in message_descriptor.oneofs: - hassable_fields[oneof.name] = oneof - - def HasField(self, field_name): - try: - field = hassable_fields[field_name] - except KeyError: - raise ValueError(error_msg % (message_descriptor.full_name, field_name)) - - if isinstance(field, descriptor_mod.OneofDescriptor): - try: - return HasField(self, self._oneofs[field].name) - except KeyError: - return False - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(field) - return value is not None and value._is_present_in_parent - else: - return field in self._fields - - cls.HasField = HasField - - -def _AddClearFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def ClearField(self, field_name): - try: - field = message_descriptor.fields_by_name[field_name] - except KeyError: - try: - field = message_descriptor.oneofs_by_name[field_name] - if field in self._oneofs: - field = self._oneofs[field] - else: - return - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - if field in self._fields: - # To match the C++ implementation, we need to invalidate iterators - # for map fields when ClearField() happens. - if hasattr(self._fields[field], 'InvalidateIterators'): - self._fields[field].InvalidateIterators() - - # Note: If the field is a sub-message, its listener will still point - # at us. That's fine, because the worst than can happen is that it - # will call _Modified() and invalidate our byte size. Big deal. 
- del self._fields[field] - - if self._oneofs.get(field.containing_oneof, None) is field: - del self._oneofs[field.containing_oneof] - - # Always call _Modified() -- even if nothing was changed, this is - # a mutating method, and thus calling it should cause the field to become - # present in the parent message. - self._Modified() - - cls.ClearField = ClearField - - -def _AddClearExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def ClearExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - - # Similar to ClearField(), above. - if extension_handle in self._fields: - del self._fields[extension_handle] - self._Modified() - cls.ClearExtension = ClearExtension - - -def _AddHasExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def HasExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: - raise KeyError('"%s" is repeated.' % extension_handle.full_name) - - if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(extension_handle) - return value is not None and value._is_present_in_parent - else: - return extension_handle in self._fields - cls.HasExtension = HasExtension - -def _InternalUnpackAny(msg): - """Unpacks Any message and returns the unpacked message. - - This internal method is different from public Any Unpack method which takes - the target message as argument. _InternalUnpackAny method does not have - target message type and need to find the message type in descriptor pool. - - Args: - msg: An Any message to be unpacked. - - Returns: - The unpacked message. - """ - # TODO(amauryfa): Don't use the factory of generated messages. - # To make Any work with custom factories, use the message factory of the - # parent message. - # pylint: disable=g-import-not-at-top - from google.protobuf import symbol_database - factory = symbol_database.Default() - - type_url = msg.type_url - - if not type_url: - return None - - # TODO(haberman): For now we just strip the hostname. Better logic will be - # required. - type_name = type_url.split('/')[-1] - descriptor = factory.pool.FindMessageTypeByName(type_name) - - if descriptor is None: - return None - - message_class = factory.GetPrototype(descriptor) - message = message_class() - - message.ParseFromString(msg.value) - return message - - -def _AddEqualsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __eq__(self, other): - if (not isinstance(other, message_mod.Message) or - other.DESCRIPTOR != self.DESCRIPTOR): - return False - - if self is other: - return True - - if self.DESCRIPTOR.full_name == _AnyFullTypeName: - any_a = _InternalUnpackAny(self) - any_b = _InternalUnpackAny(other) - if any_a and any_b: - return any_a == any_b - - if not self.ListFields() == other.ListFields(): - return False - - # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, - # then use it for the comparison. 
- unknown_fields = list(self._unknown_fields) - unknown_fields.sort() - other_unknown_fields = list(other._unknown_fields) - other_unknown_fields.sort() - return unknown_fields == other_unknown_fields - - cls.__eq__ = __eq__ - - -def _AddStrMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __str__(self): - return text_format.MessageToString(self) - cls.__str__ = __str__ - - -def _AddReprMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __repr__(self): - return text_format.MessageToString(self) - cls.__repr__ = __repr__ - - -def _AddUnicodeMethod(unused_message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def __unicode__(self): - return text_format.MessageToString(self, as_utf8=True).decode('utf-8') - cls.__unicode__ = __unicode__ - - -def _BytesForNonRepeatedElement(value, field_number, field_type): - """Returns the number of bytes needed to serialize a non-repeated element. - The returned byte count includes space for tag information and any - other additional space associated with serializing value. - - Args: - value: Value we're serializing. - field_number: Field number of this value. (Since the field number - is stored as part of a varint-encoded tag, this has an impact - on the total bytes required to serialize the value). - field_type: The type of the field. One of the TYPE_* constants - within FieldDescriptor. - """ - try: - fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] - return fn(field_number, value) - except KeyError: - raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) - - -def _AddByteSizeMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ByteSize(self): - if not self._cached_byte_size_dirty: - return self._cached_byte_size - - size = 0 - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - size = descriptor.fields_by_name['key']._sizer(self.key) - size += descriptor.fields_by_name['value']._sizer(self.value) - else: - for field_descriptor, field_value in self.ListFields(): - size += field_descriptor._sizer(field_value) - for tag_bytes, value_bytes in self._unknown_fields: - size += len(tag_bytes) + len(value_bytes) - - self._cached_byte_size = size - self._cached_byte_size_dirty = False - self._listener_for_children.dirty = False - return size - - cls.ByteSize = ByteSize - - -def _AddSerializeToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializeToString(self, **kwargs): - # Check if the message has all of its required fields set. 
- if not self.IsInitialized(): - raise message_mod.EncodeError( - 'Message %s is missing required fields: %s' % ( - self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) - return self.SerializePartialToString(**kwargs) - cls.SerializeToString = SerializeToString - - -def _AddSerializePartialToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializePartialToString(self, **kwargs): - out = BytesIO() - self._InternalSerialize(out.write, **kwargs) - return out.getvalue() - cls.SerializePartialToString = SerializePartialToString - - def InternalSerialize(self, write_bytes, deterministic=None): - if deterministic is None: - deterministic = ( - api_implementation.IsPythonDefaultSerializationDeterministic()) - else: - deterministic = bool(deterministic) - - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - descriptor.fields_by_name['key']._encoder( - write_bytes, self.key, deterministic) - descriptor.fields_by_name['value']._encoder( - write_bytes, self.value, deterministic) - else: - for field_descriptor, field_value in self.ListFields(): - field_descriptor._encoder(write_bytes, field_value, deterministic) - for tag_bytes, value_bytes in self._unknown_fields: - write_bytes(tag_bytes) - write_bytes(value_bytes) - cls._InternalSerialize = InternalSerialize - - -def _AddMergeFromStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def MergeFromString(self, serialized): - serialized = memoryview(serialized) - length = len(serialized) - try: - if self._InternalParse(serialized, 0, length) != length: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise message_mod.DecodeError('Unexpected end-group tag.') - except (IndexError, TypeError): - # Now ord(buf[p:p+1]) == ord('') gets TypeError. - raise message_mod.DecodeError('Truncated message.') - except struct.error as e: - raise message_mod.DecodeError(e) - return length # Return this for legacy reasons. - cls.MergeFromString = MergeFromString - - local_ReadTag = decoder.ReadTag - local_SkipField = decoder.SkipField - decoders_by_tag = cls._decoders_by_tag - - def InternalParse(self, buffer, pos, end): - """Create a message from serialized bytes. - - Args: - self: Message, instance of the proto message object. - buffer: memoryview of the serialized data. - pos: int, position to start in the serialized data. - end: int, end position of the serialized data. - - Returns: - Message object. - """ - # Guard against internal misuse, since this function is called internally - # quite extensively, and its easy to accidentally pass bytes. 
- assert isinstance(buffer, memoryview) - self._Modified() - field_dict = self._fields - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - while pos != end: - (tag_bytes, new_pos) = local_ReadTag(buffer, pos) - field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) - if field_decoder is None: - if not self._unknown_fields: # pylint: disable=protected-access - self._unknown_fields = [] # pylint: disable=protected-access - if unknown_field_set is None: - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - # pylint: disable=protected-access - (tag, _) = decoder._DecodeVarint(tag_bytes, 0) - field_number, wire_type = wire_format.UnpackTag(tag) - if field_number == 0: - raise message_mod.DecodeError('Field number 0 is illegal.') - # TODO(jieluo): remove old_pos. - old_pos = new_pos - (data, new_pos) = decoder._DecodeUnknownField( - buffer, new_pos, wire_type) # pylint: disable=protected-access - if new_pos == -1: - return pos - # pylint: disable=protected-access - unknown_field_set._add(field_number, wire_type, data) - # TODO(jieluo): remove _unknown_fields. - new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) - if new_pos == -1: - return pos - self._unknown_fields.append( - (tag_bytes, buffer[old_pos:new_pos].tobytes())) - pos = new_pos - else: - pos = field_decoder(buffer, new_pos, end, self, field_dict) - if field_desc: - self._UpdateOneofState(field_desc) - return pos - cls._InternalParse = InternalParse - - -def _AddIsInitializedMethod(message_descriptor, cls): - """Adds the IsInitialized and FindInitializationError methods to the - protocol message class.""" - - required_fields = [field for field in message_descriptor.fields - if field.label == _FieldDescriptor.LABEL_REQUIRED] - - def IsInitialized(self, errors=None): - """Checks if all required fields of a message are set. - - Args: - errors: A list which, if provided, will be populated with the field - paths of all missing required fields. - - Returns: - True iff the specified message has all required fields set. - """ - - # Performance is critical so we avoid HasField() and ListFields(). - - for field in required_fields: - if (field not in self._fields or - (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and - not self._fields[field]._is_present_in_parent)): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - for field, value in list(self._fields.items()): # dict can change size! - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.label == _FieldDescriptor.LABEL_REPEATED: - if (field.message_type.has_options and - field.message_type.GetOptions().map_entry): - continue - for element in value: - if not element.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - elif value._is_present_in_parent and not value.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - return True - - cls.IsInitialized = IsInitialized - - def FindInitializationErrors(self): - """Finds required fields which are not initialized. - - Returns: - A list of strings. Each string is a path to an uninitialized field from - the top-level message, e.g. "foo.bar[5].baz". 
- """ - - errors = [] # simplify things - - for field in required_fields: - if not self.HasField(field.name): - errors.append(field.name) - - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - name = '(%s)' % field.full_name - else: - name = field.name - - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - element = value[key] - prefix = '%s[%s].' % (name, key) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - # ScalarMaps can't have any initialization errors. - pass - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for i in range(len(value)): - element = value[i] - prefix = '%s[%d].' % (name, i) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - prefix = name + '.' - sub_errors = value.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - - return errors - - cls.FindInitializationErrors = FindInitializationErrors - - -def _FullyQualifiedClassName(klass): - module = klass.__module__ - name = getattr(klass, '__qualname__', klass.__name__) - if module in (None, 'builtins', '__builtin__'): - return name - return module + '.' + name - - -def _AddMergeFromMethod(cls): - LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED - CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE - - def MergeFrom(self, msg): - if not isinstance(msg, cls): - raise TypeError( - 'Parameter to MergeFrom() must be instance of same class: ' - 'expected %s got %s.' % (_FullyQualifiedClassName(cls), - _FullyQualifiedClassName(msg.__class__))) - - assert msg is not self - self._Modified() - - fields = self._fields - - for field, value in msg._fields.items(): - if field.label == LABEL_REPEATED: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - elif field.cpp_type == CPPTYPE_MESSAGE: - if value._is_present_in_parent: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - else: - self._fields[field] = value - if field.containing_oneof: - self._UpdateOneofState(field) - - if msg._unknown_fields: - if not self._unknown_fields: - self._unknown_fields = [] - self._unknown_fields.extend(msg._unknown_fields) - # pylint: disable=protected-access - if self._unknown_field_set is None: - self._unknown_field_set = containers.UnknownFieldSet() - self._unknown_field_set._extend(msg._unknown_field_set) - - cls.MergeFrom = MergeFrom - - -def _AddWhichOneofMethod(message_descriptor, cls): - def WhichOneof(self, oneof_name): - """Returns the name of the currently set field inside a oneof, or None.""" - try: - field = message_descriptor.oneofs_by_name[oneof_name] - except KeyError: - raise ValueError( - 'Protocol message has no oneof "%s" field.' % oneof_name) - - nested_field = self._oneofs.get(field, None) - if nested_field is not None and self.HasField(nested_field.name): - return nested_field.name - else: - return None - - cls.WhichOneof = WhichOneof - - -def _Clear(self): - # Clear fields. 
- self._fields = {} - self._unknown_fields = () - # pylint: disable=protected-access - if self._unknown_field_set is not None: - self._unknown_field_set._clear() - self._unknown_field_set = None - - self._oneofs = {} - self._Modified() - - -def _UnknownFields(self): - if self._unknown_field_set is None: # pylint: disable=protected-access - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - return self._unknown_field_set # pylint: disable=protected-access - - -def _DiscardUnknownFields(self): - self._unknown_fields = [] - self._unknown_field_set = None # pylint: disable=protected-access - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - value[key].DiscardUnknownFields() - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for sub_message in value: - sub_message.DiscardUnknownFields() - else: - value.DiscardUnknownFields() - - -def _SetListener(self, listener): - if listener is None: - self._listener = message_listener_mod.NullMessageListener() - else: - self._listener = listener - - -def _AddMessageMethods(message_descriptor, cls): - """Adds implementations of all Message methods to cls.""" - _AddListFieldsMethod(message_descriptor, cls) - _AddHasFieldMethod(message_descriptor, cls) - _AddClearFieldMethod(message_descriptor, cls) - if message_descriptor.is_extendable: - _AddClearExtensionMethod(cls) - _AddHasExtensionMethod(cls) - _AddEqualsMethod(message_descriptor, cls) - _AddStrMethod(message_descriptor, cls) - _AddReprMethod(message_descriptor, cls) - _AddUnicodeMethod(message_descriptor, cls) - _AddByteSizeMethod(message_descriptor, cls) - _AddSerializeToStringMethod(message_descriptor, cls) - _AddSerializePartialToStringMethod(message_descriptor, cls) - _AddMergeFromStringMethod(message_descriptor, cls) - _AddIsInitializedMethod(message_descriptor, cls) - _AddMergeFromMethod(cls) - _AddWhichOneofMethod(message_descriptor, cls) - # Adds methods which do not depend on cls. - cls.Clear = _Clear - cls.UnknownFields = _UnknownFields - cls.DiscardUnknownFields = _DiscardUnknownFields - cls._SetListener = _SetListener - - -def _AddPrivateHelperMethods(message_descriptor, cls): - """Adds implementation of private helper methods to cls.""" - - def Modified(self): - """Sets the _cached_byte_size_dirty bit to true, - and propagates this to our listener iff this was a state change. - """ - - # Note: Some callers check _cached_byte_size_dirty before calling - # _Modified() as an extra optimization. So, if this method is ever - # changed such that it does stuff even when _cached_byte_size_dirty is - # already true, the callers need to be updated. - if not self._cached_byte_size_dirty: - self._cached_byte_size_dirty = True - self._listener_for_children.dirty = True - self._is_present_in_parent = True - self._listener.Modified() - - def _UpdateOneofState(self, field): - """Sets field as the active field in its containing oneof. - - Will also delete currently active field in the oneof, if it is different - from the argument. Does not mark the message as modified. 
- """ - other_field = self._oneofs.setdefault(field.containing_oneof, field) - if other_field is not field: - del self._fields[other_field] - self._oneofs[field.containing_oneof] = field - - cls._Modified = Modified - cls.SetInParent = Modified - cls._UpdateOneofState = _UpdateOneofState - - -class _Listener(object): - - """MessageListener implementation that a parent message registers with its - child message. - - In order to support semantics like: - - foo.bar.baz.qux = 23 - assert foo.HasField('bar') - - ...child objects must have back references to their parents. - This helper class is at the heart of this support. - """ - - def __init__(self, parent_message): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - """ - # This listener establishes a back reference from a child (contained) object - # to its parent (containing) object. We make this a weak reference to avoid - # creating cyclic garbage when the client finishes with the 'parent' object - # in the tree. - if isinstance(parent_message, weakref.ProxyType): - self._parent_message_weakref = parent_message - else: - self._parent_message_weakref = weakref.proxy(parent_message) - - # As an optimization, we also indicate directly on the listener whether - # or not the parent message is dirty. This way we can avoid traversing - # up the tree in the common case. - self.dirty = False - - def Modified(self): - if self.dirty: - return - try: - # Propagate the signal to our parents iff this is the first field set. - self._parent_message_weakref._Modified() - except ReferenceError: - # We can get here if a client has kept a reference to a child object, - # and is now setting a field on it, but the child's parent has been - # garbage-collected. This is not an error. - pass - - -class _OneofListener(_Listener): - """Special listener implementation for setting composite oneof fields.""" - - def __init__(self, parent_message, field): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - field: The descriptor of the field being set in the parent message. - """ - super(_OneofListener, self).__init__(parent_message) - self._field = field - - def Modified(self): - """Also updates the state of the containing oneof in the parent message.""" - try: - self._parent_message_weakref._UpdateOneofState(self._field) - super(_OneofListener, self).Modified() - except ReferenceError: - pass diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py deleted file mode 100644 index a53e71fe8e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py +++ /dev/null @@ -1,435 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. 
-# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides type checking routines. - -This module defines type checking utilities in the forms of dictionaries: - -VALUE_CHECKERS: A dictionary of field types and a value validation object. -TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing - function. -TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization - function. -FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their - corresponding wire types. -TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization - function. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import ctypes -import numbers - -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import wire_format -from google.protobuf import descriptor - -_FieldDescriptor = descriptor.FieldDescriptor - - -def TruncateToFourByteFloat(original): - return ctypes.c_float(original).value - - -def ToShortestFloat(original): - """Returns the shortest float that has same value in wire.""" - # All 4 byte floats have between 6 and 9 significant digits, so we - # start with 6 as the lower bound. - # It has to be iterative because use '.9g' directly can not get rid - # of the noises for most values. For example if set a float_field=0.9 - # use '.9g' will print 0.899999976. - precision = 6 - rounded = float('{0:.{1}g}'.format(original, precision)) - while TruncateToFourByteFloat(rounded) != original: - precision += 1 - rounded = float('{0:.{1}g}'.format(original, precision)) - return rounded - - -def SupportsOpenEnums(field_descriptor): - return field_descriptor.containing_type.syntax == 'proto3' - - -def GetTypeChecker(field): - """Returns a type checker for a message field of the specified types. - - Args: - field: FieldDescriptor object for this field. - - Returns: - An instance of TypeChecker which can be used to verify the types - of values assigned to a field of the specified type. - """ - if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and - field.type == _FieldDescriptor.TYPE_STRING): - return UnicodeValueChecker() - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - if SupportsOpenEnums(field): - # When open enums are supported, any int32 can be assigned. - return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] - else: - return EnumValueChecker(field.enum_type) - return _VALUE_CHECKERS[field.cpp_type] - - -# None of the typecheckers below make any attempt to guard against people -# subclassing builtin types and doing weird things. 
We're not trying to -# protect against malicious clients here, just people accidentally shooting -# themselves in the foot in obvious ways. -class TypeChecker(object): - - """Type checker used to catch type errors as early as possible - when the client is setting scalar fields in protocol messages. - """ - - def __init__(self, *acceptable_types): - self._acceptable_types = acceptable_types - - def CheckValue(self, proposed_value): - """Type check the provided value and return it. - - The returned value might have been normalized to another type. - """ - if not isinstance(proposed_value, self._acceptable_types): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), self._acceptable_types)) - raise TypeError(message) - return proposed_value - - -class TypeCheckerWithDefault(TypeChecker): - - def __init__(self, default_value, *acceptable_types): - TypeChecker.__init__(self, *acceptable_types) - self._default_value = default_value - - def DefaultValue(self): - return self._default_value - - -class BoolValueChecker(object): - """Type checker used for bool fields.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bool, int))) - raise TypeError(message) - return bool(proposed_value) - - def DefaultValue(self): - return False - - -# IntValueChecker and its subclasses perform integer type-checks -# and bounds-checks. -class IntValueChecker(object): - - """Checker used for integer fields. Performs type-check and range check.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - - if not self._MIN <= int(proposed_value) <= self._MAX: - raise ValueError('Value out of range: %d' % proposed_value) - # We force all values to int to make alternate implementations where the - # distinction is more significant (e.g. the C++ implementation) simpler. - proposed_value = int(proposed_value) - return proposed_value - - def DefaultValue(self): - return 0 - - -class EnumValueChecker(object): - - """Checker used for enum fields. Performs type-check and range check.""" - - def __init__(self, enum_type): - self._enum_type = enum_type - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, numbers.Integral): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - if int(proposed_value) not in self._enum_type.values_by_number: - raise ValueError('Unknown enum value: %d' % proposed_value) - return proposed_value - - def DefaultValue(self): - return self._enum_type.values[0].number - - -class UnicodeValueChecker(object): - - """Checker used for string fields. - - Always returns a unicode value, even if the input is of type str. - """ - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, (bytes, str)): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bytes, str))) - raise TypeError(message) - - # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
- if isinstance(proposed_value, bytes): - try: - proposed_value = proposed_value.decode('utf-8') - except UnicodeDecodeError: - raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' - 'encoding. Non-UTF-8 strings must be converted to ' - 'unicode objects before being added.' % - (proposed_value)) - else: - try: - proposed_value.encode('utf8') - except UnicodeEncodeError: - raise ValueError('%.1024r isn\'t a valid unicode string and ' - 'can\'t be encoded in UTF-8.'% - (proposed_value)) - - return proposed_value - - def DefaultValue(self): - return u"" - - -class Int32ValueChecker(IntValueChecker): - # We're sure to use ints instead of longs here since comparison may be more - # efficient. - _MIN = -2147483648 - _MAX = 2147483647 - - -class Uint32ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 32) - 1 - - -class Int64ValueChecker(IntValueChecker): - _MIN = -(1 << 63) - _MAX = (1 << 63) - 1 - - -class Uint64ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 64) - 1 - - -# The max 4 bytes float is about 3.4028234663852886e+38 -_FLOAT_MAX = float.fromhex('0x1.fffffep+127') -_FLOAT_MIN = -_FLOAT_MAX -_INF = float('inf') -_NEG_INF = float('-inf') - - -class DoubleValueChecker(object): - """Checker used for double fields. - - Performs type-check and range check. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - if (not hasattr(proposed_value, '__float__') and - not hasattr(proposed_value, '__index__')) or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: int, float' % - (proposed_value, type(proposed_value))) - raise TypeError(message) - return float(proposed_value) - - def DefaultValue(self): - return 0.0 - - -class FloatValueChecker(DoubleValueChecker): - """Checker used for float fields. - - Performs type-check and range check. - - Values exceeding a 32-bit float will be converted to inf/-inf. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - converted_value = super().CheckValue(proposed_value) - # This inf rounding matches the C++ proto SafeDoubleToFloat logic. - if converted_value > _FLOAT_MAX: - return _INF - if converted_value < _FLOAT_MIN: - return _NEG_INF - - return TruncateToFourByteFloat(converted_value) - -# Type-checkers for all scalar CPPTYPEs. -_VALUE_CHECKERS = { - _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), - _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), - _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), - _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), - _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), - _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), -} - - -# Map from field type to a function F, such that F(field_num, value) -# gives the total byte size for a value of the given type. This -# byte size includes tag information and any other additional space -# associated with serializing "value". 
-TYPE_TO_BYTE_SIZE_FN = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, - _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, - _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, - _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, - _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, - _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, - _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, - _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, - _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, - _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, - _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, - _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, - _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, - _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, - _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, - _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, - _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, - _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize - } - - -# Maps from field types to encoder constructors. -TYPE_TO_ENCODER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, - _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, - _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, - _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, - _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, - _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, - _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, - _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, - } - - -# Maps from field types to sizer constructors. -TYPE_TO_SIZER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, - _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, - _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, - _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, - _FieldDescriptor.TYPE_STRING: encoder.StringSizer, - _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, - _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, - _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, - } - - -# Maps from field type to a decoder constructor. 
-TYPE_TO_DECODER = { - _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, - _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, - _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, - _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, - _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, - _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, - _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, - _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, - _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, - _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, - _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, - _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, - _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, - _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, - _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, - _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, - _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, - _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, - } - -# Maps from field type to expected wiretype. -FIELD_TYPE_TO_WIRE_TYPE = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_STRING: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, - _FieldDescriptor.TYPE_MESSAGE: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_BYTES: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, - } diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py deleted file mode 100644 index b581ab750a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py +++ /dev/null @@ -1,878 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains well known classes. - -This files defines well known classes which need extra maintenance including: - - Any - - Duration - - FieldMask - - Struct - - Timestamp -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - -import calendar -import collections.abc -import datetime - -from google.protobuf.descriptor import FieldDescriptor - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_NANOS_PER_SECOND = 1000000000 -_NANOS_PER_MILLISECOND = 1000000 -_NANOS_PER_MICROSECOND = 1000 -_MILLIS_PER_SECOND = 1000 -_MICROS_PER_SECOND = 1000000 -_SECONDS_PER_DAY = 24 * 3600 -_DURATION_SECONDS_MAX = 315576000000 - - -class Any(object): - """Class for Any Message type.""" - - __slots__ = () - - def Pack(self, msg, type_url_prefix='type.googleapis.com/', - deterministic=None): - """Packs the specified message into current Any message.""" - if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': - self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - else: - self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - self.value = msg.SerializeToString(deterministic=deterministic) - - def Unpack(self, msg): - """Unpacks the current Any message into specified message.""" - descriptor = msg.DESCRIPTOR - if not self.Is(descriptor): - return False - msg.ParseFromString(self.value) - return True - - def TypeName(self): - """Returns the protobuf type name of the inner message.""" - # Only last part is to be used: b/25630112 - return self.type_url.split('/')[-1] - - def Is(self, descriptor): - """Checks if this Any represents the given protobuf type.""" - return '/' in self.type_url and self.TypeName() == descriptor.full_name - - -_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) -_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( - 0, tz=datetime.timezone.utc) - - -class Timestamp(object): - """Class for Timestamp message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Timestamp to RFC 3339 date string format. - - Returns: - A string converted from timestamp. The string is always Z-normalized - and uses 3, 6 or 9 fractional digits as required to represent the - exact time. Example of the return format: '1972-01-01T10:00:20.021Z' - """ - nanos = self.nanos % _NANOS_PER_SECOND - total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND - seconds = total_sec % _SECONDS_PER_DAY - days = (total_sec - seconds) // _SECONDS_PER_DAY - dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) - - result = dt.isoformat() - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 'Z' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. 
- return result + '.%03dZ' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06dZ' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09dZ' % nanos - - def FromJsonString(self, value): - """Parse a RFC 3339 date string format to Timestamp. - - Args: - value: A date string. Any fractional digits (or none) and any offset are - accepted as long as they fit into nano-seconds precision. - Example of accepted format: '1972-01-01T10:00:20.021-05:00' - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) - timezone_offset = value.find('Z') - if timezone_offset == -1: - timezone_offset = value.find('+') - if timezone_offset == -1: - timezone_offset = value.rfind('-') - if timezone_offset == -1: - raise ValueError( - 'Failed to parse timestamp: missing valid timezone offset.') - time_value = value[0:timezone_offset] - # Parse datetime and nanos. - point_position = time_value.find('.') - if point_position == -1: - second_value = time_value - nano_value = '' - else: - second_value = time_value[:point_position] - nano_value = time_value[point_position + 1:] - if 't' in second_value: - raise ValueError( - 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' - 'lowercase \'t\' is not accepted'.format(second_value)) - date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) - td = date_object - datetime.datetime(1970, 1, 1) - seconds = td.seconds + td.days * _SECONDS_PER_DAY - if len(nano_value) > 9: - raise ValueError( - 'Failed to parse Timestamp: nanos {0} more than ' - '9 fractional digits.'.format(nano_value)) - if nano_value: - nanos = round(float('0.' + nano_value) * 1e9) - else: - nanos = 0 - # Parse timezone offsets. 
- if value[timezone_offset] == 'Z': - if len(value) != timezone_offset + 1: - raise ValueError('Failed to parse timestamp: invalid trailing' - ' data {0}.'.format(value)) - else: - timezone = value[timezone_offset:] - pos = timezone.find(':') - if pos == -1: - raise ValueError( - 'Invalid timezone offset value: {0}.'.format(timezone)) - if timezone[0] == '+': - seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - else: - seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - # Set seconds and nanos - self.seconds = int(seconds) - self.nanos = int(nanos) - - def GetCurrentTime(self): - """Get the current UTC into Timestamp.""" - self.FromDatetime(datetime.datetime.utcnow()) - - def ToNanoseconds(self): - """Converts Timestamp to nanoseconds since epoch.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts Timestamp to microseconds since epoch.""" - return (self.seconds * _MICROS_PER_SECOND + - self.nanos // _NANOS_PER_MICROSECOND) - - def ToMilliseconds(self): - """Converts Timestamp to milliseconds since epoch.""" - return (self.seconds * _MILLIS_PER_SECOND + - self.nanos // _NANOS_PER_MILLISECOND) - - def ToSeconds(self): - """Converts Timestamp to seconds since epoch.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds since epoch to Timestamp.""" - self.seconds = nanos // _NANOS_PER_SECOND - self.nanos = nanos % _NANOS_PER_SECOND - - def FromMicroseconds(self, micros): - """Converts microseconds since epoch to Timestamp.""" - self.seconds = micros // _MICROS_PER_SECOND - self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND - - def FromMilliseconds(self, millis): - """Converts milliseconds since epoch to Timestamp.""" - self.seconds = millis // _MILLIS_PER_SECOND - self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND - - def FromSeconds(self, seconds): - """Converts seconds since epoch to Timestamp.""" - self.seconds = seconds - self.nanos = 0 - - def ToDatetime(self, tzinfo=None): - """Converts Timestamp to a datetime. - - Args: - tzinfo: A datetime.tzinfo subclass; defaults to None. - - Returns: - If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone - information, i.e. not aware that it's UTC). - - Otherwise, returns a timezone-aware datetime in the input timezone. - """ - delta = datetime.timedelta( - seconds=self.seconds, - microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) - if tzinfo is None: - return _EPOCH_DATETIME_NAIVE + delta - else: - return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta - - def FromDatetime(self, dt): - """Converts datetime to Timestamp. - - Args: - dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. - """ - # Using this guide: http://wiki.python.org/moin/WorkingWithTime - # And this conversion guide: http://docs.python.org/library/time.html - - # Turn the date parameter into a tuple (struct_time) that can then be - # manipulated into a long value of seconds. During the conversion from - # struct_time to long, the source date in UTC, and so it follows that the - # correct transformation is calendar.timegm() - self.seconds = calendar.timegm(dt.utctimetuple()) - self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND - - -class Duration(object): - """Class for Duration message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Duration to string format. - - Returns: - A string converted from self. 
The string format will contains - 3, 6, or 9 fractional digits depending on the precision required to - represent the exact Duration value. For example: "1s", "1.010s", - "1.000000100s", "-3.100s" - """ - _CheckDurationValid(self.seconds, self.nanos) - if self.seconds < 0 or self.nanos < 0: - result = '-' - seconds = - self.seconds + int((0 - self.nanos) // 1e9) - nanos = (0 - self.nanos) % 1e9 - else: - result = '' - seconds = self.seconds + int(self.nanos // 1e9) - nanos = self.nanos % 1e9 - result += '%d' % seconds - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 's' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. - return result + '.%03ds' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06ds' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09ds' % nanos - - def FromJsonString(self, value): - """Converts a string to Duration. - - Args: - value: A string to be converted. The string must end with 's'. Any - fractional digits (or none) are accepted as long as they fit into - precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Duration JSON value not a string: {!r}'.format(value)) - if len(value) < 1 or value[-1] != 's': - raise ValueError( - 'Duration must end with letter "s": {0}.'.format(value)) - try: - pos = value.find('.') - if pos == -1: - seconds = int(value[:-1]) - nanos = 0 - else: - seconds = int(value[:pos]) - if value[0] == '-': - nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) - else: - nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) - _CheckDurationValid(seconds, nanos) - self.seconds = seconds - self.nanos = nanos - except ValueError as e: - raise ValueError( - 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) - - def ToNanoseconds(self): - """Converts a Duration to nanoseconds.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts a Duration to microseconds.""" - micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) - return self.seconds * _MICROS_PER_SECOND + micros - - def ToMilliseconds(self): - """Converts a Duration to milliseconds.""" - millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) - return self.seconds * _MILLIS_PER_SECOND + millis - - def ToSeconds(self): - """Converts a Duration to seconds.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds to Duration.""" - self._NormalizeDuration(nanos // _NANOS_PER_SECOND, - nanos % _NANOS_PER_SECOND) - - def FromMicroseconds(self, micros): - """Converts microseconds to Duration.""" - self._NormalizeDuration( - micros // _MICROS_PER_SECOND, - (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) - - def FromMilliseconds(self, millis): - """Converts milliseconds to Duration.""" - self._NormalizeDuration( - millis // _MILLIS_PER_SECOND, - (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) - - def FromSeconds(self, seconds): - """Converts seconds to Duration.""" - self.seconds = seconds - self.nanos = 0 - - def ToTimedelta(self): - """Converts Duration to timedelta.""" - return datetime.timedelta( - seconds=self.seconds, microseconds=_RoundTowardZero( - self.nanos, _NANOS_PER_MICROSECOND)) - - def FromTimedelta(self, td): - """Converts timedelta to Duration.""" - 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, - td.microseconds * _NANOS_PER_MICROSECOND) - - def _NormalizeDuration(self, seconds, nanos): - """Set Duration by seconds and nanos.""" - # Force nanos to be negative if the duration is negative. - if seconds < 0 and nanos > 0: - seconds += 1 - nanos -= _NANOS_PER_SECOND - self.seconds = seconds - self.nanos = nanos - - -def _CheckDurationValid(seconds, nanos): - if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: - raise ValueError( - 'Duration is not valid: Seconds {0} must be in range ' - '[-315576000000, 315576000000].'.format(seconds)) - if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: - raise ValueError( - 'Duration is not valid: Nanos {0} must be in range ' - '[-999999999, 999999999].'.format(nanos)) - if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): - raise ValueError( - 'Duration is not valid: Sign mismatch.') - - -def _RoundTowardZero(value, divider): - """Truncates the remainder part after division.""" - # For some languages, the sign of the remainder is implementation - # dependent if any of the operands is negative. Here we enforce - # "rounded toward zero" semantics. For example, for (-5) / 2 an - # implementation may give -3 as the result with the remainder being - # 1. This function ensures we always return -2 (closer to zero). - result = value // divider - remainder = value % divider - if result < 0 and remainder > 0: - return result + 1 - else: - return result - - -class FieldMask(object): - """Class for FieldMask message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts FieldMask to string according to proto3 JSON spec.""" - camelcase_paths = [] - for path in self.paths: - camelcase_paths.append(_SnakeCaseToCamelCase(path)) - return ','.join(camelcase_paths) - - def FromJsonString(self, value): - """Converts string to FieldMask according to proto3 JSON spec.""" - if not isinstance(value, str): - raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) - self.Clear() - if value: - for path in value.split(','): - self.paths.append(_CamelCaseToSnakeCase(path)) - - def IsValidForDescriptor(self, message_descriptor): - """Checks whether the FieldMask is valid for Message Descriptor.""" - for path in self.paths: - if not _IsValidPath(message_descriptor, path): - return False - return True - - def AllFieldsFromDescriptor(self, message_descriptor): - """Gets all direct fields of Message Descriptor to FieldMask.""" - self.Clear() - for field in message_descriptor.fields: - self.paths.append(field.name) - - def CanonicalFormFromMask(self, mask): - """Converts a FieldMask to the canonical form. - - Removes paths that are covered by another path. For example, - "foo.bar" is covered by "foo" and will be removed if "foo" - is also in the FieldMask. Then sorts all paths in alphabetical order. - - Args: - mask: The original FieldMask to be converted. 
- """ - tree = _FieldMaskTree(mask) - tree.ToFieldMask(self) - - def Union(self, mask1, mask2): - """Merges mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - tree.MergeFromFieldMask(mask2) - tree.ToFieldMask(self) - - def Intersect(self, mask1, mask2): - """Intersects mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - intersection = _FieldMaskTree() - for path in mask2.paths: - tree.IntersectPath(path, intersection) - intersection.ToFieldMask(self) - - def MergeMessage( - self, source, destination, - replace_message_field=False, replace_repeated_field=False): - """Merges fields specified in FieldMask from source to destination. - - Args: - source: Source message. - destination: The destination message to be merged into. - replace_message_field: Replace message field if True. Merge message - field if False. - replace_repeated_field: Replace repeated field if True. Append - elements of repeated field if False. - """ - tree = _FieldMaskTree(self) - tree.MergeMessage( - source, destination, replace_message_field, replace_repeated_field) - - -def _IsValidPath(message_descriptor, path): - """Checks whether the path is valid for Message Descriptor.""" - parts = path.split('.') - last = parts.pop() - for name in parts: - field = message_descriptor.fields_by_name.get(name) - if (field is None or - field.label == FieldDescriptor.LABEL_REPEATED or - field.type != FieldDescriptor.TYPE_MESSAGE): - return False - message_descriptor = field.message_type - return last in message_descriptor.fields_by_name - - -def _CheckFieldMaskMessage(message): - """Raises ValueError if message is not a FieldMask.""" - message_descriptor = message.DESCRIPTOR - if (message_descriptor.name != 'FieldMask' or - message_descriptor.file.name != 'google/protobuf/field_mask.proto'): - raise ValueError('Message {0} is not a FieldMask.'.format( - message_descriptor.full_name)) - - -def _SnakeCaseToCamelCase(path_name): - """Converts a path name from snake_case to camelCase.""" - result = [] - after_underscore = False - for c in path_name: - if c.isupper(): - raise ValueError( - 'Fail to print FieldMask to Json string: Path name ' - '{0} must not contain uppercase letters.'.format(path_name)) - if after_underscore: - if c.islower(): - result.append(c.upper()) - after_underscore = False - else: - raise ValueError( - 'Fail to print FieldMask to Json string: The ' - 'character after a "_" must be a lowercase letter ' - 'in path name {0}.'.format(path_name)) - elif c == '_': - after_underscore = True - else: - result += c - - if after_underscore: - raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' - 'in path name {0}.'.format(path_name)) - return ''.join(result) - - -def _CamelCaseToSnakeCase(path_name): - """Converts a field name from camelCase to snake_case.""" - result = [] - for c in path_name: - if c == '_': - raise ValueError('Fail to parse FieldMask: Path name ' - '{0} must not contain "_"s.'.format(path_name)) - if c.isupper(): - result += '_' - result += c.lower() - else: - result += c - return ''.join(result) - - -class _FieldMaskTree(object): - """Represents a FieldMask in a tree structure. - - For example, given a FieldMask "foo.bar,foo.baz,bar.baz", - the FieldMaskTree will be: - [_root] -+- foo -+- bar - | | - | +- baz - | - +- bar --- baz - In the tree, each leaf node represents a field path. 
- """ - - __slots__ = ('_root',) - - def __init__(self, field_mask=None): - """Initializes the tree by FieldMask.""" - self._root = {} - if field_mask: - self.MergeFromFieldMask(field_mask) - - def MergeFromFieldMask(self, field_mask): - """Merges a FieldMask to the tree.""" - for path in field_mask.paths: - self.AddPath(path) - - def AddPath(self, path): - """Adds a field path into the tree. - - If the field path to add is a sub-path of an existing field path - in the tree (i.e., a leaf node), it means the tree already matches - the given path so nothing will be added to the tree. If the path - matches an existing non-leaf node in the tree, that non-leaf node - will be turned into a leaf node with all its children removed because - the path matches all the node's children. Otherwise, a new path will - be added. - - Args: - path: The field path to add. - """ - node = self._root - for name in path.split('.'): - if name not in node: - node[name] = {} - elif not node[name]: - # Pre-existing empty node implies we already have this entire tree. - return - node = node[name] - # Remove any sub-trees we might have had. - node.clear() - - def ToFieldMask(self, field_mask): - """Converts the tree to a FieldMask.""" - field_mask.Clear() - _AddFieldPaths(self._root, '', field_mask) - - def IntersectPath(self, path, intersection): - """Calculates the intersection part of a field path with this tree. - - Args: - path: The field path to calculates. - intersection: The out tree to record the intersection part. - """ - node = self._root - for name in path.split('.'): - if name not in node: - return - elif not node[name]: - intersection.AddPath(path) - return - node = node[name] - intersection.AddLeafNodes(path, node) - - def AddLeafNodes(self, prefix, node): - """Adds leaf nodes begin with prefix to this tree.""" - if not node: - self.AddPath(prefix) - for name in node: - child_path = prefix + '.' + name - self.AddLeafNodes(child_path, node[name]) - - def MergeMessage( - self, source, destination, - replace_message, replace_repeated): - """Merge all fields specified by this tree from source to destination.""" - _MergeMessage( - self._root, source, destination, replace_message, replace_repeated) - - -def _StrConvert(value): - """Converts value to str if it is not.""" - # This file is imported by c extension and some methods like ClearField - # requires string for the field name. py2/py3 has different text - # type and may use unicode. - if not isinstance(value, str): - return value.encode('utf-8') - return value - - -def _MergeMessage( - node, source, destination, replace_message, replace_repeated): - """Merge all fields specified by a sub-tree from source to destination.""" - source_descriptor = source.DESCRIPTOR - for name in node: - child = node[name] - field = source_descriptor.fields_by_name[name] - if field is None: - raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( - name, source_descriptor.full_name)) - if child: - # Sub-paths are only allowed for singular message fields. 
- if (field.label == FieldDescriptor.LABEL_REPEATED or - field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): - raise ValueError('Error: Field {0} in message {1} is not a singular ' - 'message field and cannot have sub-fields.'.format( - name, source_descriptor.full_name)) - if source.HasField(name): - _MergeMessage( - child, getattr(source, name), getattr(destination, name), - replace_message, replace_repeated) - continue - if field.label == FieldDescriptor.LABEL_REPEATED: - if replace_repeated: - destination.ClearField(_StrConvert(name)) - repeated_source = getattr(source, name) - repeated_destination = getattr(destination, name) - repeated_destination.MergeFrom(repeated_source) - else: - if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - if replace_message: - destination.ClearField(_StrConvert(name)) - if source.HasField(name): - getattr(destination, name).MergeFrom(getattr(source, name)) - else: - setattr(destination, name, getattr(source, name)) - - -def _AddFieldPaths(node, prefix, field_mask): - """Adds the field paths descended from node to field_mask.""" - if not node and prefix: - field_mask.paths.append(prefix) - return - for name in sorted(node): - if prefix: - child_path = prefix + '.' + name - else: - child_path = name - _AddFieldPaths(node[name], child_path, field_mask) - - -def _SetStructValue(struct_value, value): - if value is None: - struct_value.null_value = 0 - elif isinstance(value, bool): - # Note: this check must come before the number check because in Python - # True and False are also considered numbers. - struct_value.bool_value = value - elif isinstance(value, str): - struct_value.string_value = value - elif isinstance(value, (int, float)): - struct_value.number_value = value - elif isinstance(value, (dict, Struct)): - struct_value.struct_value.Clear() - struct_value.struct_value.update(value) - elif isinstance(value, (list, ListValue)): - struct_value.list_value.Clear() - struct_value.list_value.extend(value) - else: - raise ValueError('Unexpected type') - - -def _GetStructValue(struct_value): - which = struct_value.WhichOneof('kind') - if which == 'struct_value': - return struct_value.struct_value - elif which == 'null_value': - return None - elif which == 'number_value': - return struct_value.number_value - elif which == 'string_value': - return struct_value.string_value - elif which == 'bool_value': - return struct_value.bool_value - elif which == 'list_value': - return struct_value.list_value - elif which is None: - raise ValueError('Value not set') - - -class Struct(object): - """Class for Struct message type.""" - - __slots__ = () - - def __getitem__(self, key): - return _GetStructValue(self.fields[key]) - - def __contains__(self, item): - return item in self.fields - - def __setitem__(self, key, value): - _SetStructValue(self.fields[key], value) - - def __delitem__(self, key): - del self.fields[key] - - def __len__(self): - return len(self.fields) - - def __iter__(self): - return iter(self.fields) - - def keys(self): # pylint: disable=invalid-name - return self.fields.keys() - - def values(self): # pylint: disable=invalid-name - return [self[key] for key in self] - - def items(self): # pylint: disable=invalid-name - return [(key, self[key]) for key in self] - - def get_or_create_list(self, key): - """Returns a list for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('list_value'): - # Clear will mark list_value modified which will indeed create a list. 
- self.fields[key].list_value.Clear() - return self.fields[key].list_value - - def get_or_create_struct(self, key): - """Returns a struct for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('struct_value'): - # Clear will mark struct_value modified which will indeed create a struct. - self.fields[key].struct_value.Clear() - return self.fields[key].struct_value - - def update(self, dictionary): # pylint: disable=invalid-name - for key, value in dictionary.items(): - _SetStructValue(self.fields[key], value) - -collections.abc.MutableMapping.register(Struct) - - -class ListValue(object): - """Class for ListValue message type.""" - - __slots__ = () - - def __len__(self): - return len(self.values) - - def append(self, value): - _SetStructValue(self.values.add(), value) - - def extend(self, elem_seq): - for value in elem_seq: - self.append(value) - - def __getitem__(self, index): - """Retrieves item by the specified index.""" - return _GetStructValue(self.values.__getitem__(index)) - - def __setitem__(self, index, value): - _SetStructValue(self.values.__getitem__(index), value) - - def __delitem__(self, key): - del self.values[key] - - def items(self): - for i in range(len(self)): - yield self[i] - - def add_struct(self): - """Appends and returns a struct value as the next value in the list.""" - struct_value = self.values.add().struct_value - # Clear will mark struct_value modified which will indeed create a struct. - struct_value.Clear() - return struct_value - - def add_list(self): - """Appends and returns a list value as the next value in the list.""" - list_value = self.values.add().list_value - # Clear will mark list_value modified which will indeed create a list. - list_value.Clear() - return list_value - -collections.abc.MutableSequence.register(ListValue) - - -WKTBASES = { - 'google.protobuf.Any': Any, - 'google.protobuf.Duration': Duration, - 'google.protobuf.FieldMask': FieldMask, - 'google.protobuf.ListValue': ListValue, - 'google.protobuf.Struct': Struct, - 'google.protobuf.Timestamp': Timestamp, -} diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py deleted file mode 100644 index 883f525585..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py +++ /dev/null @@ -1,268 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Constants and static functions to support protocol buffer wire format.""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import struct -from google.protobuf import descriptor -from google.protobuf import message - - -TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. -TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 - -# These numbers identify the wire type of a protocol buffer value. -# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded -# tag-and-type to store one of these WIRETYPE_* constants. -# These values must match WireType enum in google/protobuf/wire_format.h. -WIRETYPE_VARINT = 0 -WIRETYPE_FIXED64 = 1 -WIRETYPE_LENGTH_DELIMITED = 2 -WIRETYPE_START_GROUP = 3 -WIRETYPE_END_GROUP = 4 -WIRETYPE_FIXED32 = 5 -_WIRETYPE_MAX = 5 - - -# Bounds for various integer types. -INT32_MAX = int((1 << 31) - 1) -INT32_MIN = int(-(1 << 31)) -UINT32_MAX = (1 << 32) - 1 - -INT64_MAX = (1 << 63) - 1 -INT64_MIN = -(1 << 63) -UINT64_MAX = (1 << 64) - 1 - -# "struct" format strings that will encode/decode the specified formats. -FORMAT_UINT32_LITTLE_ENDIAN = '<I' -FORMAT_UINT64_LITTLE_ENDIAN = '<Q' -FORMAT_FLOAT_LITTLE_ENDIAN = '<f' -FORMAT_DOUBLE_LITTLE_ENDIAN = '<d' - - -# We'll have to provide alternate implementations of AppendLittleEndian*() on -# any architectures where these checks fail. -if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4: - raise AssertionError('Format "I" is not a 32-bit number.') -if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8: - raise AssertionError('Format "Q" is not a 64-bit number.') - - -def PackTag(field_number, wire_type): - """Returns an unsigned 32-bit integer that encodes the field number and - wire type information in standard protocol message wire format. - - Args: - field_number: Expected to be an integer in the range [1, 1 << 29) - wire_type: One of the WIRETYPE_* constants. - """ - if not 0 <= wire_type <= _WIRETYPE_MAX: - raise message.EncodeError('Unknown wire type: %d' % wire_type) - return (field_number << TAG_TYPE_BITS) | wire_type - - -def UnpackTag(tag): - """The inverse of PackTag(). Given an unsigned 32-bit number, - returns a (field_number, wire_type) tuple. - """ - return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) - - -def ZigZagEncode(value): - """ZigZag Transform: Encodes signed integers so that they can be - effectively used with varint encoding. See wire_format.h for - more details. - """ - if value >= 0: - return value << 1 - return (value << 1) ^ (~0) - - -def ZigZagDecode(value): - """Inverse of ZigZagEncode().""" - if not value & 0x1: - return value >> 1 - return (value >> 1) ^ (~0) - - - -# The *ByteSize() functions below return the number of bytes required to -# serialize "field number + type" information and then serialize the value. - - -def Int32ByteSize(field_number, int32): - return Int64ByteSize(field_number, int32) - - -def Int32ByteSizeNoTag(int32): - return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) - - -def Int64ByteSize(field_number, int64): - # Have to convert to uint before calling UInt64ByteSize(). 
- return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) - - -def UInt32ByteSize(field_number, uint32): - return UInt64ByteSize(field_number, uint32) - - -def UInt64ByteSize(field_number, uint64): - return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) - - -def SInt32ByteSize(field_number, int32): - return UInt32ByteSize(field_number, ZigZagEncode(int32)) - - -def SInt64ByteSize(field_number, int64): - return UInt64ByteSize(field_number, ZigZagEncode(int64)) - - -def Fixed32ByteSize(field_number, fixed32): - return TagByteSize(field_number) + 4 - - -def Fixed64ByteSize(field_number, fixed64): - return TagByteSize(field_number) + 8 - - -def SFixed32ByteSize(field_number, sfixed32): - return TagByteSize(field_number) + 4 - - -def SFixed64ByteSize(field_number, sfixed64): - return TagByteSize(field_number) + 8 - - -def FloatByteSize(field_number, flt): - return TagByteSize(field_number) + 4 - - -def DoubleByteSize(field_number, double): - return TagByteSize(field_number) + 8 - - -def BoolByteSize(field_number, b): - return TagByteSize(field_number) + 1 - - -def EnumByteSize(field_number, enum): - return UInt32ByteSize(field_number, enum) - - -def StringByteSize(field_number, string): - return BytesByteSize(field_number, string.encode('utf-8')) - - -def BytesByteSize(field_number, b): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(len(b)) - + len(b)) - - -def GroupByteSize(field_number, message): - return (2 * TagByteSize(field_number) # START and END group. - + message.ByteSize()) - - -def MessageByteSize(field_number, message): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(message.ByteSize()) - + message.ByteSize()) - - -def MessageSetItemByteSize(field_number, msg): - # First compute the sizes of the tags. - # There are 2 tags for the beginning and ending of the repeated group, that - # is field number 1, one with field number 2 (type_id) and one with field - # number 3 (message). - total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) - - # Add the number of bytes for type_id. - total_size += _VarUInt64ByteSizeNoTag(field_number) - - message_size = msg.ByteSize() - - # The number of bytes for encoding the length of the message. - total_size += _VarUInt64ByteSizeNoTag(message_size) - - # The size of the message. - total_size += message_size - return total_size - - -def TagByteSize(field_number): - """Returns the bytes required to serialize a tag with this field number.""" - # Just pass in type 0, since the type won't affect the tag+type size. - return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) - - -# Private helper function for the *ByteSize() functions above. - -def _VarUInt64ByteSizeNoTag(uint64): - """Returns the number of bytes required to serialize a single varint - using boundary value comparisons. (unrolled loop optimization -WPierce) - uint64 must be unsigned. 
- """ - if uint64 <= 0x7f: return 1 - if uint64 <= 0x3fff: return 2 - if uint64 <= 0x1fffff: return 3 - if uint64 <= 0xfffffff: return 4 - if uint64 <= 0x7ffffffff: return 5 - if uint64 <= 0x3ffffffffff: return 6 - if uint64 <= 0x1ffffffffffff: return 7 - if uint64 <= 0xffffffffffffff: return 8 - if uint64 <= 0x7fffffffffffffff: return 9 - if uint64 > UINT64_MAX: - raise message.EncodeError('Value out of range: %d' % uint64) - return 10 - - -NON_PACKABLE_TYPES = ( - descriptor.FieldDescriptor.TYPE_STRING, - descriptor.FieldDescriptor.TYPE_GROUP, - descriptor.FieldDescriptor.TYPE_MESSAGE, - descriptor.FieldDescriptor.TYPE_BYTES -) - - -def IsTypePackable(field_type): - """Return true iff packable = true is valid for fields of this type. - - Args: - field_type: a FieldDescriptor::Type value. - - Returns: - True iff fields of this type are packable. - """ - return field_type not in NON_PACKABLE_TYPES diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py deleted file mode 100644 index 5024ed89d7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py +++ /dev/null @@ -1,912 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in JSON format. - -Simple usage example: - - # Create a proto object and serialize it to a json format string. - message = my_proto_pb2.MyMessage(foo='bar') - json_string = json_format.MessageToJson(message) - - # Parse a json format string to proto object. 
- message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - - -import base64 -from collections import OrderedDict -import json -import math -from operator import methodcaller -import re -import sys - -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import symbol_database - - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, - descriptor.FieldDescriptor.CPPTYPE_UINT32, - descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, - descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) -_INFINITY = 'Infinity' -_NEG_INFINITY = '-Infinity' -_NAN = 'NaN' - -_UNPAIRED_SURROGATE_PATTERN = re.compile( - u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: - raise ParseError('Message too deep. Max recursion depth is {0}'.format( - self.max_recursion_depth)) - message_descriptor = message.DESCRIPTOR - full_name = message_descriptor.full_name - if not path: - path = message_descriptor.name - if _IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value, message, path) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) - else: - self._ConvertFieldValuePair(value, message, path) - self.recursion_depth -= 1 - - def _ConvertFieldValuePair(self, js, message, path): - """Convert field value pairs into regular message. - - Args: - js: A JSON object to convert the field value pairs. - message: A regular protocol message to record the data. - path: parent path to log parse error info. - - Raises: - ParseError: In case of problems converting. - """ - names = [] - message_descriptor = message.DESCRIPTOR - fields_by_json_name = dict((f.json_name, f) - for f in message_descriptor.fields) - for name in js: - try: - field = fields_by_json_name.get(name, None) - if not field: - field = message_descriptor.fields_by_name.get(name, None) - if not field and _VALID_EXTENSION_NAME.match(name): - if not message_descriptor.is_extendable: - raise ParseError( - 'Message type {0} does not have extensions at {1}'.format( - message_descriptor.full_name, path)) - identifier = name[1:-1] # strip [] brackets - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - # Try looking for extension by the message type name, dropping the - # field name following the final . separator in full_name. - identifier = '.'.join(identifier.split('.')[:-1]) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - if self.ignore_unknown_fields: - continue - raise ParseError( - ('Message type "{0}" has no field named "{1}" at "{2}".\n' - ' Available Fields(except extensions): "{3}"').format( - message_descriptor.full_name, name, path, - [f.json_name for f in message_descriptor.fields])) - if name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" fields at "{2}".'.format( - message.DESCRIPTOR.full_name, name, path)) - names.append(name) - value = js[name] - # Check no other oneof field is parsed. 
- if field.containing_oneof is not None and value is not None: - oneof_name = field.containing_oneof.name - if oneof_name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" oneof fields at "{2}".'.format( - message.DESCRIPTOR.full_name, oneof_name, - path)) - names.append(oneof_name) - - if value is None: - if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.message_type.full_name == 'google.protobuf.Value'): - sub_message = getattr(message, field.name) - sub_message.null_value = 0 - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM - and field.enum_type.full_name == 'google.protobuf.NullValue'): - setattr(message, field.name, 0) - else: - message.ClearField(field.name) - continue - - # Parse field value. - if _IsMapEntry(field): - message.ClearField(field.name) - self._ConvertMapFieldValue(value, message, field, - '{0}.{1}'.format(path, name)) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - message.ClearField(field.name) - if not isinstance(value, list): - raise ParseError('repeated field {0} must be in [] which is ' - '{1} at {2}'.format(name, value, path)) - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - # Repeated message field. - for index, item in enumerate(value): - sub_message = getattr(message, field.name).add() - # None is a null_value in Value. - if (item is None and - sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - self.ConvertMessage(item, sub_message, - '{0}.{1}[{2}]'.format(path, name, index)) - else: - # Repeated scalar field. - for index, item in enumerate(value): - if item is None: - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - getattr(message, field.name).append( - _ConvertScalarFieldValue( - item, field, '{0}.{1}[{2}]'.format(path, name, index))) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - sub_message = message.Extensions[field] - else: - sub_message = getattr(message, field.name) - sub_message.SetInParent() - self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) - else: - if field.is_extension: - message.Extensions[field] = _ConvertScalarFieldValue( - value, field, '{0}.{1}'.format(path, name)) - else: - setattr( - message, field.name, - _ConvertScalarFieldValue(value, field, - '{0}.{1}'.format(path, name))) - except ParseError as e: - if field and field.containing_oneof is None: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - else: - raise ParseError(str(e)) - except ValueError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - except TypeError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - - def _ConvertAnyMessage(self, value, message, path): - """Convert a JSON representation into Any message.""" - if isinstance(value, dict) and not value: - return - try: - type_url = value['@type'] - except KeyError: - raise ParseError( - '@type is missing when parsing any message at {0}'.format(path)) - - try: - sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) - except TypeError as e: - raise ParseError('{0} at {1}'.format(e, path)) - message_descriptor = sub_message.DESCRIPTOR - full_name = message_descriptor.full_name - if 
_IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value['value'], sub_message, - '{0}.value'.format(path)) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, - '{0}.value'.format(path))( - self) - else: - del value['@type'] - self._ConvertFieldValuePair(value, sub_message, path) - value['@type'] = type_url - # Sets Any message - message.value = sub_message.SerializeToString() - message.type_url = type_url - - def _ConvertGenericMessage(self, value, message, path): - """Convert a JSON representation into message with FromJsonString.""" - # Duration, Timestamp, FieldMask have a FromJsonString method to do the - # conversion. Users can also call the method directly. - try: - message.FromJsonString(value) - except ValueError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - def _ConvertValueMessage(self, value, message, path): - """Convert a JSON representation into Value message.""" - if isinstance(value, dict): - self._ConvertStructMessage(value, message.struct_value, path) - elif isinstance(value, list): - self._ConvertListValueMessage(value, message.list_value, path) - elif value is None: - message.null_value = 0 - elif isinstance(value, bool): - message.bool_value = value - elif isinstance(value, str): - message.string_value = value - elif isinstance(value, _INT_OR_FLOAT): - message.number_value = value - else: - raise ParseError('Value {0} has unexpected type {1} at {2}'.format( - value, type(value), path)) - - def _ConvertListValueMessage(self, value, message, path): - """Convert a JSON representation into ListValue message.""" - if not isinstance(value, list): - raise ParseError('ListValue must be in [] which is {0} at {1}'.format( - value, path)) - message.ClearField('values') - for index, item in enumerate(value): - self._ConvertValueMessage(item, message.values.add(), - '{0}[{1}]'.format(path, index)) - - def _ConvertStructMessage(self, value, message, path): - """Convert a JSON representation into Struct message.""" - if not isinstance(value, dict): - raise ParseError('Struct must be in a dict which is {0} at {1}'.format( - value, path)) - # Clear will mark the struct as modified so it will be created even if - # there are no values. - message.Clear() - for key in value: - self._ConvertValueMessage(value[key], message.fields[key], - '{0}.{1}'.format(path, key)) - return - - def _ConvertWrapperMessage(self, value, message, path): - """Convert a JSON representation into Wrapper message.""" - field = message.DESCRIPTOR.fields_by_name['value'] - setattr( - message, 'value', - _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) - - def _ConvertMapFieldValue(self, value, message, field, path): - """Convert map field value for a message map field. - - Args: - value: A JSON object to convert the map field value. - message: A protocol message to record the converted data. - field: The descriptor of the map field to be converted. - path: parent path to log parse error info. - - Raises: - ParseError: In case of convert problems. 
- """ - if not isinstance(value, dict): - raise ParseError( - 'Map field {0} must be in a dict which is {1} at {2}'.format( - field.name, value, path)) - key_field = field.message_type.fields_by_name['key'] - value_field = field.message_type.fields_by_name['value'] - for key in value: - key_value = _ConvertScalarFieldValue(key, key_field, - '{0}.key'.format(path), True) - if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self.ConvertMessage(value[key], - getattr(message, field.name)[key_value], - '{0}[{1}]'.format(path, key_value)) - else: - getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( - value[key], value_field, path='{0}[{1}]'.format(path, key_value)) - - -def _ConvertScalarFieldValue(value, field, path, require_str=False): - """Convert a single scalar field value. - - Args: - value: A scalar value to convert the scalar field value. - field: The descriptor of the field to convert. - path: parent path to log parse error info. - require_str: If True, the field value must be a str. - - Returns: - The converted scalar field value - - Raises: - ParseError: In case of convert problems. - """ - try: - if field.cpp_type in _INT_TYPES: - return _ConvertInteger(value) - elif field.cpp_type in _FLOAT_TYPES: - return _ConvertFloat(value, field) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - return _ConvertBool(value, require_str) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - if isinstance(value, str): - encoded = value.encode('utf-8') - else: - encoded = value - # Add extra padding '=' - padded_value = encoded + b'=' * (4 - len(encoded) % 4) - return base64.urlsafe_b64decode(padded_value) - else: - # Checking for unpaired surrogates appears to be unreliable, - # depending on the specific Python version, so we check manually. - if _UNPAIRED_SURROGATE_PATTERN.search(value): - raise ParseError('Unpaired surrogate') - return value - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - # Convert an enum value. - enum_value = field.enum_type.values_by_name.get(value, None) - if enum_value is None: - try: - number = int(value) - enum_value = field.enum_type.values_by_number.get(number, None) - except ValueError: - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - if enum_value is None: - if field.file.syntax == 'proto3': - # Proto3 accepts unknown enums. - return number - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - return enum_value.number - except ParseError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - -def _ConvertInteger(value): - """Convert an integer. - - Args: - value: A scalar value to convert. - - Returns: - The integer value. - - Raises: - ParseError: If an integer couldn't be consumed. 
- """ - if isinstance(value, float) and not value.is_integer(): - raise ParseError('Couldn\'t parse integer: {0}'.format(value)) - - if isinstance(value, str) and value.find(' ') != -1: - raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) - - if isinstance(value, bool): - raise ParseError('Bool value {0} is not acceptable for ' - 'integer field'.format(value)) - - return int(value) - - -def _ConvertFloat(value, field): - """Convert an floating point number.""" - if isinstance(value, float): - if math.isnan(value): - raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') - if math.isinf(value): - if value > 0: - raise ParseError('Couldn\'t parse Infinity or value too large, ' - 'use quoted "Infinity" instead') - else: - raise ParseError('Couldn\'t parse -Infinity or value too small, ' - 'use quoted "-Infinity" instead') - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - # pylint: disable=protected-access - if value > type_checkers._FLOAT_MAX: - raise ParseError('Float value too large') - # pylint: disable=protected-access - if value < type_checkers._FLOAT_MIN: - raise ParseError('Float value too small') - if value == 'nan': - raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') - try: - # Assume Python compatible syntax. - return float(value) - except ValueError: - # Check alternative spellings. - if value == _NEG_INFINITY: - return float('-inf') - elif value == _INFINITY: - return float('inf') - elif value == _NAN: - return float('nan') - else: - raise ParseError('Couldn\'t parse float: {0}'.format(value)) - - -def _ConvertBool(value, require_str): - """Convert a boolean value. - - Args: - value: A scalar value to convert. - require_str: If True, value must be a str. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - if require_str: - if value == 'true': - return True - elif value == 'false': - return False - else: - raise ParseError('Expected "true" or "false", not {0}'.format(value)) - - if not isinstance(value, bool): - raise ParseError('Expected true or false without quotes') - return value - -_WKTJSONMETHODS = { - 'google.protobuf.Any': ['_AnyMessageToJsonObject', - '_ConvertAnyMessage'], - 'google.protobuf.Duration': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', - '_ConvertListValueMessage'], - 'google.protobuf.Struct': ['_StructMessageToJsonObject', - '_ConvertStructMessage'], - 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.Value': ['_ValueMessageToJsonObject', - '_ConvertValueMessage'] -} diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py deleted file mode 100644 index 76c6802f70..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py +++ /dev/null @@ -1,424 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# TODO(robinson): We should just make these methods all "pure-virtual" and move -# all implementation out, into reflection.py for now. - - -"""Contains an abstract base class for protocol messages.""" - -__author__ = 'robinson@google.com (Will Robinson)' - -class Error(Exception): - """Base error type for this module.""" - pass - - -class DecodeError(Error): - """Exception raised when deserializing messages.""" - pass - - -class EncodeError(Error): - """Exception raised when serializing messages.""" - pass - - -class Message(object): - - """Abstract base class for protocol messages. - - Protocol message classes are almost always generated by the protocol - compiler. These generated types subclass Message and implement the methods - shown below. - """ - - # TODO(robinson): Link to an HTML document here. - - # TODO(robinson): Document that instances of this class will also - # have an Extensions attribute with __getitem__ and __setitem__. - # Again, not sure how to best convey this. - - # TODO(robinson): Document that the class must also have a static - # RegisterExtension(extension_field) method. - # Not sure how to best express at this point. - - # TODO(robinson): Document these fields and methods. - - __slots__ = [] - - #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. - DESCRIPTOR = None - - def __deepcopy__(self, memo=None): - clone = type(self)() - clone.MergeFrom(self) - return clone - - def __eq__(self, other_msg): - """Recursively compares two messages by value and structure.""" - raise NotImplementedError - - def __ne__(self, other_msg): - # Can't just say self != other_msg, since that would infinitely recurse. :) - return not self == other_msg - - def __hash__(self): - raise TypeError('unhashable object') - - def __str__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def __unicode__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def MergeFrom(self, other_msg): - """Merges the contents of the specified message into current message. - - This method merges the contents of the specified message into the current - message. Singular fields that are set in the specified message overwrite - the corresponding fields in the current message. Repeated fields are - appended. 
Singular sub-messages and groups are recursively merged. - - Args: - other_msg (Message): A message to merge into the current message. - """ - raise NotImplementedError - - def CopyFrom(self, other_msg): - """Copies the content of the specified message into the current message. - - The method clears the current message and then merges the specified - message using MergeFrom. - - Args: - other_msg (Message): A message to copy into the current one. - """ - if self is other_msg: - return - self.Clear() - self.MergeFrom(other_msg) - - def Clear(self): - """Clears all data that was set in the message.""" - raise NotImplementedError - - def SetInParent(self): - """Mark this as present in the parent. - - This normally happens automatically when you assign a field of a - sub-message, but sometimes you want to make the sub-message - present while keeping it empty. If you find yourself using this, - you may want to reconsider your design. - """ - raise NotImplementedError - - def IsInitialized(self): - """Checks if the message is initialized. - - Returns: - bool: The method returns True if the message is initialized (i.e. all of - its required fields are set). - """ - raise NotImplementedError - - # TODO(robinson): MergeFromString() should probably return None and be - # implemented in terms of a helper that returns the # of bytes read. Our - # deserialization routines would use the helper when recursively - # deserializing, but the end user would almost always just want the no-return - # MergeFromString(). - - def MergeFromString(self, serialized): - """Merges serialized protocol buffer data into this message. - - When we find a field in `serialized` that is already present - in this message: - - - If it's a "repeated" field, we append to the end of our list. - - Else, if it's a scalar, we overwrite our field. - - Else, (it's a nonrepeated composite), we recursively merge - into the existing composite. - - Args: - serialized (bytes): Any object that allows us to call - ``memoryview(serialized)`` to access a string of bytes using the - buffer interface. - - Returns: - int: The number of bytes read from `serialized`. - For non-group messages, this will always be `len(serialized)`, - but for messages which are actually groups, this will - generally be less than `len(serialized)`, since we must - stop when we reach an ``END_GROUP`` tag. Note that if - we *do* stop because of an ``END_GROUP`` tag, the number - of bytes returned does not include the bytes - for the ``END_GROUP`` tag information. - - Raises: - DecodeError: if the input cannot be parsed. - """ - # TODO(robinson): Document handling of unknown fields. - # TODO(robinson): When we switch to a helper, this will return None. - raise NotImplementedError - - def ParseFromString(self, serialized): - """Parse serialized protocol buffer data into this message. - - Like :func:`MergeFromString()`, except we clear the object first. - - Raises: - message.DecodeError if the input cannot be parsed. - """ - self.Clear() - return self.MergeFromString(serialized) - - def SerializeToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - A binary string representation of the message if all of the required - fields in the message are set (i.e. the message is initialized). - - Raises: - EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
- """ - raise NotImplementedError - - def SerializePartialToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - This method is similar to SerializeToString but doesn't check if the - message is initialized. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - bytes: A serialized representation of the partial message. - """ - raise NotImplementedError - - # TODO(robinson): Decide whether we like these better - # than auto-generated has_foo() and clear_foo() methods - # on the instances themselves. This way is less consistent - # with C++, but it makes reflection-type access easier and - # reduces the number of magically autogenerated things. - # - # TODO(robinson): Be sure to document (and test) exactly - # which field names are accepted here. Are we case-sensitive? - # What do we do with fields that share names with Python keywords - # like 'lambda' and 'yield'? - # - # nnorwitz says: - # """ - # Typically (in python), an underscore is appended to names that are - # keywords. So they would become lambda_ or yield_. - # """ - def ListFields(self): - """Returns a list of (FieldDescriptor, value) tuples for present fields. - - A message field is non-empty if HasField() would return true. A singular - primitive field is non-empty if HasField() would return true in proto2 or it - is non zero in proto3. A repeated field is non-empty if it contains at least - one element. The fields are ordered by field number. - - Returns: - list[tuple(FieldDescriptor, value)]: field descriptors and values - for all fields in the message which are not empty. The values vary by - field type. - """ - raise NotImplementedError - - def HasField(self, field_name): - """Checks if a certain field is set for the message. - - For a oneof group, checks if any field inside is set. Note that if the - field_name is not defined in the message descriptor, :exc:`ValueError` will - be raised. - - Args: - field_name (str): The name of the field to check for presence. - - Returns: - bool: Whether a value has been set for the named field. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def ClearField(self, field_name): - """Clears the contents of a given field. - - Inside a oneof group, clears the field set. If the name neither refers to a - defined field or oneof group, :exc:`ValueError` is raised. - - Args: - field_name (str): The name of the field to check for presence. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def WhichOneof(self, oneof_group): - """Returns the name of the field that is set inside a oneof group. - - If no field is set, returns None. - - Args: - oneof_group (str): the name of the oneof group to check. - - Returns: - str or None: The name of the group that is set, or None. - - Raises: - ValueError: no group with the given name exists - """ - raise NotImplementedError - - def HasExtension(self, extension_handle): - """Checks if a certain extension is present for this message. - - Extensions are retrieved using the :attr:`Extensions` mapping (if present). - - Args: - extension_handle: The handle for the extension to check. - - Returns: - bool: Whether the extension is present for this message. - - Raises: - KeyError: if the extension is repeated. 
Similar to repeated fields, - there is no separate notion of presence: a "not present" repeated - extension is an empty list. - """ - raise NotImplementedError - - def ClearExtension(self, extension_handle): - """Clears the contents of a given extension. - - Args: - extension_handle: The handle for the extension to clear. - """ - raise NotImplementedError - - def UnknownFields(self): - """Returns the UnknownFieldSet. - - Returns: - UnknownFieldSet: The unknown fields stored in this message. - """ - raise NotImplementedError - - def DiscardUnknownFields(self): - """Clears all fields in the :class:`UnknownFieldSet`. - - This operation is recursive for nested message. - """ - raise NotImplementedError - - def ByteSize(self): - """Returns the serialized size of this message. - - Recursively calls ByteSize() on all contained messages. - - Returns: - int: The number of bytes required to serialize this message. - """ - raise NotImplementedError - - @classmethod - def FromString(cls, s): - raise NotImplementedError - - @staticmethod - def RegisterExtension(extension_handle): - raise NotImplementedError - - def _SetListener(self, message_listener): - """Internal method used by the protocol message implementation. - Clients should not call this directly. - - Sets a listener that this message will call on certain state transitions. - - The purpose of this method is to register back-edges from children to - parents at runtime, for the purpose of setting "has" bits and - byte-size-dirty bits in the parent and ancestor objects whenever a child or - descendant object is modified. - - If the client wants to disconnect this Message from the object tree, she - explicitly sets callback to None. - - If message_listener is None, unregisters any existing listener. Otherwise, - message_listener must implement the MessageListener interface in - internal/message_listener.py, and we discard any listener registered - via a previous _SetListener() call. - """ - raise NotImplementedError - - def __getstate__(self): - """Support the pickle protocol.""" - return dict(serialized=self.SerializePartialToString()) - - def __setstate__(self, state): - """Support the pickle protocol.""" - self.__init__() - serialized = state['serialized'] - # On Python 3, using encoding='latin1' is required for unpickling - # protos pickled by Python 2. - if not isinstance(serialized, bytes): - serialized = serialized.encode('latin1') - self.ParseFromString(serialized) - - def __reduce__(self): - message_descriptor = self.DESCRIPTOR - if message_descriptor.containing_type is None: - return type(self), (), self.__getstate__() - # the message type must be nested. - # Python does not pickle nested classes; use the symbol_database on the - # receiving end. - container = message_descriptor - return (_InternalConstructMessage, (container.full_name,), - self.__getstate__()) - - -def _InternalConstructMessage(full_name): - """Constructs a nested message.""" - from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top - - return symbol_database.Default().GetSymbol(full_name)() diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py deleted file mode 100644 index 3656fa6874..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py +++ /dev/null @@ -1,185 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
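# Editor's note: message.py deleted above only declares the abstract Message
# interface; concrete behaviour lives in the generated classes. A small,
# self-contained round trip with a well-known type, sketching the methods
# documented above (WhichOneof, SerializeToString, ParseFromString, CopyFrom).
from google.protobuf import struct_pb2

value = struct_pb2.Value()
value.string_value = "hello"
assert value.WhichOneof("kind") == "string_value"   # oneof introspection

payload = value.SerializeToString()                  # bytes
clone = struct_pb2.Value()
clone.ParseFromString(payload)                       # Clear() followed by MergeFromString()
assert clone == value                                # recursive comparison by value

copy = struct_pb2.Value()
copy.CopyFrom(value)                                 # Clear() followed by MergeFrom()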
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides a factory class for generating dynamic messages. - -The easiest way to use this class is if you have access to the FileDescriptor -protos containing the messages you want to create you can just do the following: - -message_classes = message_factory.GetMessages(iterable_of_file_descriptors) -my_proto_instance = message_classes['some.proto.package.MessageName']() -""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -from google.protobuf.internal import api_implementation -from google.protobuf import descriptor_pool -from google.protobuf import message - -if api_implementation.Type() == 'cpp': - from google.protobuf.pyext import cpp_message as message_impl -else: - from google.protobuf.internal import python_message as message_impl - - -# The type of all Message classes. -_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType - - -class MessageFactory(object): - """Factory for creating Proto2 messages from descriptors in a pool.""" - - def __init__(self, pool=None): - """Initializes a new factory.""" - self.pool = pool or descriptor_pool.DescriptorPool() - - # local cache of all classes built from protobuf descriptors - self._classes = {} - - def GetPrototype(self, descriptor): - """Obtains a proto2 message class based on the passed in descriptor. - - Passing a descriptor with a fully qualified name matching a previous - invocation will cause the same class to be returned. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - if descriptor not in self._classes: - result_class = self.CreatePrototype(descriptor) - # The assignment to _classes is redundant for the base implementation, but - # might avoid confusion in cases where CreatePrototype gets overridden and - # does not call the base implementation. 
- self._classes[descriptor] = result_class - return result_class - return self._classes[descriptor] - - def CreatePrototype(self, descriptor): - """Builds a proto2 message class based on the passed in descriptor. - - Don't call this function directly, it always creates a new class. Call - GetPrototype() instead. This method is meant to be overridden in subblasses - to perform additional operations on the newly constructed class. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - descriptor_name = descriptor.name - result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( - descriptor_name, - (message.Message,), - { - 'DESCRIPTOR': descriptor, - # If module not set, it wrongly points to message_factory module. - '__module__': None, - }) - result_class._FACTORY = self # pylint: disable=protected-access - # Assign in _classes before doing recursive calls to avoid infinite - # recursion. - self._classes[descriptor] = result_class - for field in descriptor.fields: - if field.message_type: - self.GetPrototype(field.message_type) - for extension in result_class.DESCRIPTOR.extensions: - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result_class - - def GetMessages(self, files): - """Gets all the messages from a specified file. - - This will find and resolve dependencies, failing if the descriptor - pool cannot satisfy them. - - Args: - files: The file names to extract messages from. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - result = {} - for file_name in files: - file_desc = self.pool.FindFileByName(file_name) - for desc in file_desc.message_types_by_name.values(): - result[desc.full_name] = self.GetPrototype(desc) - - # While the extension FieldDescriptors are created by the descriptor pool, - # the python classes created in the factory need them to be registered - # explicitly, which is done below. - # - # The call to RegisterExtension will specifically check if the - # extension was already registered on the object and either - # ignore the registration if the original was the same, or raise - # an error if they were different. - - for extension in file_desc.extensions_by_name.values(): - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result - - -_FACTORY = MessageFactory() - - -def GetMessages(file_protos): - """Builds a dictionary of all the messages available in a set of files. - - Args: - file_protos: Iterable of FileDescriptorProto to build messages out of. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - # The cpp implementation of the protocol buffer library requires to add the - # message in topological order of the dependency graph. - file_by_name = {file_proto.name: file_proto for file_proto in file_protos} - def _AddFile(file_proto): - for dependency in file_proto.dependency: - if dependency in file_by_name: - # Remove from elements to be visited, in order to cut cycles. 
- _AddFile(file_by_name.pop(dependency)) - _FACTORY.pool.Add(file_proto) - while file_by_name: - _AddFile(file_by_name.popitem()[1]) - return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py deleted file mode 100644 index a4667ce63e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py +++ /dev/null @@ -1,134 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Dynamic Protobuf class creator.""" - -from collections import OrderedDict -import hashlib -import os - -from google.protobuf import descriptor_pb2 -from google.protobuf import descriptor -from google.protobuf import message_factory - - -def _GetMessageFromFactory(factory, full_name): - """Get a proto class from the MessageFactory by name. - - Args: - factory: a MessageFactory instance. - full_name: str, the fully qualified name of the proto type. - Returns: - A class, for the type identified by full_name. - Raises: - KeyError, if the proto is not found in the factory's descriptor pool. - """ - proto_descriptor = factory.pool.FindMessageTypeByName(full_name) - proto_cls = factory.GetPrototype(proto_descriptor) - return proto_cls - - -def MakeSimpleProtoClass(fields, full_name=None, pool=None): - """Create a Protobuf class whose fields are basic types. - - Note: this doesn't validate field names! - - Args: - fields: dict of {name: field_type} mappings for each field in the proto. If - this is an OrderedDict the order will be maintained, otherwise the - fields will be sorted by name. - full_name: optional str, the fully-qualified name of the proto type. - pool: optional DescriptorPool instance. - Returns: - a class, the new protobuf class with a FileDescriptor. 
- """ - factory = message_factory.MessageFactory(pool=pool) - - if full_name is not None: - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # Get a list of (name, field_type) tuples from the fields dict. If fields was - # an OrderedDict we keep the order, but otherwise we sort the field to ensure - # consistent ordering. - field_items = fields.items() - if not isinstance(fields, OrderedDict): - field_items = sorted(field_items) - - # Use a consistent file name that is unlikely to conflict with any imported - # proto files. - fields_hash = hashlib.sha1() - for f_name, f_type in field_items: - fields_hash.update(f_name.encode('utf-8')) - fields_hash.update(str(f_type).encode('utf-8')) - proto_file_name = fields_hash.hexdigest() + '.proto' - - # If the proto is anonymous, use the same hash to name it. - if full_name is None: - full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + - fields_hash.hexdigest()) - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # This is the first time we see this proto: add a new descriptor to the pool. - factory.pool.Add( - _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) - return _GetMessageFromFactory(factory, full_name) - - -def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): - """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" - package, name = full_name.rsplit('.', 1) - file_proto = descriptor_pb2.FileDescriptorProto() - file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) - file_proto.package = package - desc_proto = file_proto.message_type.add() - desc_proto.name = name - for f_number, (f_name, f_type) in enumerate(field_items, 1): - field_proto = desc_proto.field.add() - field_proto.name = f_name - # # If the number falls in the reserved range, reassign it to the correct - # # number after the range. - if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: - f_number += ( - descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - - descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) - field_proto.number = f_number - field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL - field_proto.type = f_type - return file_proto diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py deleted file mode 100644 index fc8eb32d79..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py +++ /dev/null @@ -1,65 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Protocol message implementation hooks for C++ implementation. - -Contains helper functions used to create protocol message classes from -Descriptor objects at runtime backed by the protocol buffer C++ API. -""" - -__author__ = 'tibell@google.com (Johan Tibell)' - -from google.protobuf.pyext import _message - - -class GeneratedProtocolMessageType(_message.MessageMeta): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - - The above example will not work for nested types. If you wish to include them, - use reflection.MakeClass() instead of manually instantiating the class in - order to create the appropriate class structure. - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py deleted file mode 100644 index 2c6ecf4c98..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/pyext/python.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestAllExtensions.RegisterExtension(optional_nested_message_extension) - TestAllExtensions.RegisterExtension(repeated_nested_message_extension) - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'H\001' - _TESTALLTYPES._serialized_start=72 - _TESTALLTYPES._serialized_end=388 - _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 - _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 - _FOREIGNMESSAGE._serialized_start=390 - _FOREIGNMESSAGE._serialized_end=428 - _TESTALLEXTENSIONS._serialized_start=430 - _TESTALLEXTENSIONS._serialized_end=459 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py deleted file mode 100644 index 81e18859a8..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py +++ /dev/null @@ -1,95 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
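# Editor's note: cpp_message.py and the generated python_pb2.py deleted above
# are only exercised when the accelerated C++ backend is selected. Which
# backend is active can be checked at runtime; 'cpp' and 'python' are the
# classic answers, and newer protobuf releases may also report 'upb' (an
# assumption worth verifying against the installed version).
from google.protobuf.internal import api_implementation

print(api_implementation.Type())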
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. -(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -from google.protobuf import message_factory -from google.protobuf import symbol_database - -# The type of all Message classes. -# Part of the public interface, but normally only used by message factories. -GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE - -MESSAGE_CLASS_CACHE = {} - - -# Deprecated. Please NEVER use reflection.ParseMessage(). -def ParseMessage(descriptor, byte_str): - """Generate a new Message instance from this Descriptor and a byte string. - - DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). - Please use MessageFactory.GetPrototype() instead. - - Args: - descriptor: Protobuf Descriptor object - byte_str: Serialized protocol buffer byte string - - Returns: - Newly created protobuf Message object. - """ - result_class = MakeClass(descriptor) - new_msg = result_class() - new_msg.ParseFromString(byte_str) - return new_msg - - -# Deprecated. Please NEVER use reflection.MakeClass(). -def MakeClass(descriptor): - """Construct a class object for a protobuf described by descriptor. - - DEPRECATED: use MessageFactory.GetPrototype() instead. - - Args: - descriptor: A descriptor.Descriptor object describing the protobuf. - Returns: - The Message class object described by the descriptor. - """ - # Original implementation leads to duplicate message classes, which won't play - # well with extensions. Message factory info is also missing. - # Redirect to message_factory. - return symbol_database.Default().GetPrototype(descriptor) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py deleted file mode 100644 index 5625246324..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py +++ /dev/null @@ -1,228 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
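# Editor's note: reflection.py deleted above steers users away from
# ParseMessage()/MakeClass() and towards the factory/symbol database. A short
# sketch of that replacement path: importing a generated *_pb2 module
# registers its classes, after which the default symbol database can resolve
# them by full name.
from google.protobuf import symbol_database
from google.protobuf import timestamp_pb2  # import registers the generated classes

Timestamp = symbol_database.Default().GetSymbol("google.protobuf.Timestamp")
assert Timestamp is timestamp_pb2.Timestamp  # same generated class, no duplicates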
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""DEPRECATED: Declares the RPC service interfaces. - -This module declares the abstract interfaces underlying proto2 RPC -services. These are intended to be independent of any particular RPC -implementation, so that proto2 services can be used on top of a variety -of implementations. Starting with version 2.3.0, RPC implementations should -not try to build on these, but should instead provide code generator plugins -which generate code specific to the particular RPC implementation. This way -the generated code can be more appropriate for the implementation in use -and can avoid unnecessary layers of indirection. -""" - -__author__ = 'petar@google.com (Petar Petrov)' - - -class RpcException(Exception): - """Exception raised on failed blocking RPC method call.""" - pass - - -class Service(object): - - """Abstract base interface for protocol-buffer-based RPC services. - - Services themselves are abstract classes (implemented either by servers or as - stubs), but they subclass this base interface. The methods of this - interface can be used to call the methods of the service without knowing - its exact type at compile time (analogous to the Message interface). - """ - - def GetDescriptor(): - """Retrieves this service's descriptor.""" - raise NotImplementedError - - def CallMethod(self, method_descriptor, rpc_controller, - request, done): - """Calls a method of the service specified by method_descriptor. - - If "done" is None then the call is blocking and the response - message will be returned directly. Otherwise the call is asynchronous - and "done" will later be called with the response value. - - In the blocking case, RpcException will be raised on error. - - Preconditions: - - * method_descriptor.service == GetDescriptor - * request is of the exact same classes as returned by - GetRequestClass(method). - * After the call has started, the request must not be modified. - * "rpc_controller" is of the correct type for the RPC implementation being - used by this Service. 
For stubs, the "correct type" depends on the - RpcChannel which the stub is using. - - Postconditions: - - * "done" will be called when the method is complete. This may be - before CallMethod() returns or it may be at some point in the future. - * If the RPC failed, the response value passed to "done" will be None. - Further details about the failure can be found by querying the - RpcController. - """ - raise NotImplementedError - - def GetRequestClass(self, method_descriptor): - """Returns the class of the request message for the specified method. - - CallMethod() requires that the request is of a particular subclass of - Message. GetRequestClass() gets the default instance of this required - type. - - Example: - method = service.GetDescriptor().FindMethodByName("Foo") - request = stub.GetRequestClass(method)() - request.ParseFromString(input) - service.CallMethod(method, request, callback) - """ - raise NotImplementedError - - def GetResponseClass(self, method_descriptor): - """Returns the class of the response message for the specified method. - - This method isn't really needed, as the RpcChannel's CallMethod constructs - the response protocol message. It's provided anyway in case it is useful - for the caller to know the response type in advance. - """ - raise NotImplementedError - - -class RpcController(object): - - """An RpcController mediates a single method call. - - The primary purpose of the controller is to provide a way to manipulate - settings specific to the RPC implementation and to find out about RPC-level - errors. The methods provided by the RpcController interface are intended - to be a "least common denominator" set of features which we expect all - implementations to support. Specific implementations may provide more - advanced features (e.g. deadline propagation). - """ - - # Client-side methods below - - def Reset(self): - """Resets the RpcController to its initial state. - - After the RpcController has been reset, it may be reused in - a new call. Must not be called while an RPC is in progress. - """ - raise NotImplementedError - - def Failed(self): - """Returns true if the call failed. - - After a call has finished, returns true if the call failed. The possible - reasons for failure depend on the RPC implementation. Failed() must not - be called before a call has finished. If Failed() returns true, the - contents of the response message are undefined. - """ - raise NotImplementedError - - def ErrorText(self): - """If Failed is true, returns a human-readable description of the error.""" - raise NotImplementedError - - def StartCancel(self): - """Initiate cancellation. - - Advises the RPC system that the caller desires that the RPC call be - canceled. The RPC system may cancel it immediately, may wait awhile and - then cancel it, or may not even cancel the call at all. If the call is - canceled, the "done" callback will still be called and the RpcController - will indicate that the call failed at that time. - """ - raise NotImplementedError - - # Server-side methods below - - def SetFailed(self, reason): - """Sets a failure reason. - - Causes Failed() to return true on the client side. "reason" will be - incorporated into the message returned by ErrorText(). If you find - you need to return machine-readable information about failures, you - should incorporate it into your response protocol buffer and should - NOT call SetFailed(). - """ - raise NotImplementedError - - def IsCanceled(self): - """Checks if the client cancelled the RPC. 
- - If true, indicates that the client canceled the RPC, so the server may - as well give up on replying to it. The server should still call the - final "done" callback. - """ - raise NotImplementedError - - def NotifyOnCancel(self, callback): - """Sets a callback to invoke on cancel. - - Asks that the given callback be called when the RPC is canceled. The - callback will always be called exactly once. If the RPC completes without - being canceled, the callback will be called after completion. If the RPC - has already been canceled when NotifyOnCancel() is called, the callback - will be called immediately. - - NotifyOnCancel() must be called no more than once per request. - """ - raise NotImplementedError - - -class RpcChannel(object): - - """Abstract interface for an RPC channel. - - An RpcChannel represents a communication line to a service which can be used - to call that service's methods. The service may be running on another - machine. Normally, you should not use an RpcChannel directly, but instead - construct a stub {@link Service} wrapping it. Example: - - Example: - RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") - RpcController controller = rpcImpl.Controller() - MyService service = MyService_Stub(channel) - service.MyMethod(controller, request, callback) - """ - - def CallMethod(self, method_descriptor, rpc_controller, - request, response_class, done): - """Calls the method identified by the descriptor. - - Call the given method of the remote service. The signature of this - procedure looks the same as Service.CallMethod(), but the requirements - are less strict in one important way: the request object doesn't have to - be of any specific class as long as its descriptor is method.input_type. - """ - raise NotImplementedError diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py deleted file mode 100644 index f82ab7145a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py +++ /dev/null @@ -1,295 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
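# Editor's note: service.py deleted above only defines abstract RPC interfaces
# (deprecated upstream in favour of code-generator plugins). A minimal,
# illustrative client-side RpcController satisfying that interface; error
# handling is deliberately simplistic and the cancellation hooks are left to
# the (NotImplementedError-raising) base class.
from google.protobuf import service


class LocalController(service.RpcController):
    def __init__(self):
        self._failed = False
        self._error = ""

    def Reset(self):
        self._failed, self._error = False, ""

    def Failed(self):
        return self._failed

    def ErrorText(self):
        return self._error

    def SetFailed(self, reason):
        # Called by the server side; makes Failed() return True on the client.
        self._failed, self._error = True, str(reason)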
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains metaclasses used to create protocol service and service stub -classes from ServiceDescriptor objects at runtime. - -The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to -inject all useful functionality into the classes output by the protocol -compiler at compile-time. -""" - -__author__ = 'petar@google.com (Petar Petrov)' - - -class GeneratedServiceType(type): - - """Metaclass for service classes created at runtime from ServiceDescriptors. - - Implementations for all methods described in the Service class are added here - by this class. We also create properties to allow getting/setting all fields - in the protocol message. - - The protocol compiler currently uses this metaclass to create protocol service - classes at runtime. Clients can also manually create their own classes at - runtime, as in this example:: - - mydescriptor = ServiceDescriptor(.....) - class MyProtoService(service.Service): - __metaclass__ = GeneratedServiceType - DESCRIPTOR = mydescriptor - myservice_instance = MyProtoService() - # ... - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service class. - - Args: - name: Name of the class (ignored, but required by the metaclass - protocol). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service class is subclassed. - if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] - service_builder = _ServiceBuilder(descriptor) - service_builder.BuildService(cls) - cls.DESCRIPTOR = descriptor - - -class GeneratedServiceStubType(GeneratedServiceType): - - """Metaclass for service stubs created at runtime from ServiceDescriptors. - - This class has similar responsibilities as GeneratedServiceType, except that - it creates the service stub classes. - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service stub class. - - Args: - name: Name of the class (ignored, here). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service stub is subclassed. 
- if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] - service_stub_builder = _ServiceStubBuilder(descriptor) - service_stub_builder.BuildServiceStub(cls) - - -class _ServiceBuilder(object): - - """This class constructs a protocol service class using a service descriptor. - - Given a service descriptor, this class constructs a class that represents - the specified service descriptor. One service builder instance constructs - exactly one service class. That means all instances of that class share the - same builder. - """ - - def __init__(self, service_descriptor): - """Initializes an instance of the service class builder. - - Args: - service_descriptor: ServiceDescriptor to use when constructing the - service class. - """ - self.descriptor = service_descriptor - - def BuildService(builder, cls): - """Constructs the service class. - - Args: - cls: The class that will be constructed. - """ - - # CallMethod needs to operate with an instance of the Service class. This - # internal wrapper function exists only to be able to pass the service - # instance to the method that does the real CallMethod work. - # Making sure to use exact argument names from the abstract interface in - # service.py to match the type signature - def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): - return builder._CallMethod(self, method_descriptor, rpc_controller, - request, done) - - def _WrapGetRequestClass(self, method_descriptor): - return builder._GetRequestClass(method_descriptor) - - def _WrapGetResponseClass(self, method_descriptor): - return builder._GetResponseClass(method_descriptor) - - builder.cls = cls - cls.CallMethod = _WrapCallMethod - cls.GetDescriptor = staticmethod(lambda: builder.descriptor) - cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' - cls.GetRequestClass = _WrapGetRequestClass - cls.GetResponseClass = _WrapGetResponseClass - for method in builder.descriptor.methods: - setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) - - def _CallMethod(self, srvc, method_descriptor, - rpc_controller, request, callback): - """Calls the method described by a given method descriptor. - - Args: - srvc: Instance of the service for which this method is called. - method_descriptor: Descriptor that represent the method to call. - rpc_controller: RPC controller to use for this method's execution. - request: Request protocol message. - callback: A callback to invoke after the method has completed. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'CallMethod() given method descriptor for wrong service type.') - method = getattr(srvc, method_descriptor.name) - return method(rpc_controller, request, callback) - - def _GetRequestClass(self, method_descriptor): - """Returns the class of the request protocol message. - - Args: - method_descriptor: Descriptor of the method for which to return the - request protocol message class. - - Returns: - A class that represents the input protocol message of the specified - method. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'GetRequestClass() given method descriptor for wrong service type.') - return method_descriptor.input_type._concrete_class - - def _GetResponseClass(self, method_descriptor): - """Returns the class of the response protocol message. 
- - Args: - method_descriptor: Descriptor of the method for which to return the - response protocol message class. - - Returns: - A class that represents the output protocol message of the specified - method. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'GetResponseClass() given method descriptor for wrong service type.') - return method_descriptor.output_type._concrete_class - - def _GenerateNonImplementedMethod(self, method): - """Generates and returns a method that can be set for a service methods. - - Args: - method: Descriptor of the service method for which a method is to be - generated. - - Returns: - A method that can be added to the service class. - """ - return lambda inst, rpc_controller, request, callback: ( - self._NonImplementedMethod(method.name, rpc_controller, callback)) - - def _NonImplementedMethod(self, method_name, rpc_controller, callback): - """The body of all methods in the generated service class. - - Args: - method_name: Name of the method being executed. - rpc_controller: RPC controller used to execute this method. - callback: A callback which will be invoked when the method finishes. - """ - rpc_controller.SetFailed('Method %s not implemented.' % method_name) - callback(None) - - -class _ServiceStubBuilder(object): - - """Constructs a protocol service stub class using a service descriptor. - - Given a service descriptor, this class constructs a suitable stub class. - A stub is just a type-safe wrapper around an RpcChannel which emulates a - local implementation of the service. - - One service stub builder instance constructs exactly one class. It means all - instances of that class share the same service stub builder. - """ - - def __init__(self, service_descriptor): - """Initializes an instance of the service stub class builder. - - Args: - service_descriptor: ServiceDescriptor to use when constructing the - stub class. - """ - self.descriptor = service_descriptor - - def BuildServiceStub(self, cls): - """Constructs the stub class. - - Args: - cls: The class that will be constructed. - """ - - def _ServiceStubInit(stub, rpc_channel): - stub.rpc_channel = rpc_channel - self.cls = cls - cls.__init__ = _ServiceStubInit - for method in self.descriptor.methods: - setattr(cls, method.name, self._GenerateStubMethod(method)) - - def _GenerateStubMethod(self, method): - return (lambda inst, rpc_controller, request, callback=None: - self._StubMethod(inst, method, rpc_controller, request, callback)) - - def _StubMethod(self, stub, method_descriptor, - rpc_controller, request, callback): - """The body of all service methods in the generated stub class. - - Args: - stub: Stub instance. - method_descriptor: Descriptor of the invoked method. - rpc_controller: Rpc controller to execute the method. - request: Request protocol message. - callback: A callback to execute when the method finishes. - Returns: - Response message (in case of blocking call). - """ - return stub.rpc_channel.CallMethod( - method_descriptor, rpc_controller, request, - method_descriptor.output_type._concrete_class, callback) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py deleted file mode 100644 index 30cca2e06e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/source_context.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SOURCECONTEXT._serialized_start=57 - _SOURCECONTEXT._serialized_end=91 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py deleted file mode 100644 index 149728ca08..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/struct.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _STRUCT_FIELDSENTRY._options = None - _STRUCT_FIELDSENTRY._serialized_options = b'8\001' - _NULLVALUE._serialized_start=474 - _NULLVALUE._serialized_end=501 - _STRUCT._serialized_start=50 - _STRUCT._serialized_end=182 - _STRUCT_FIELDSENTRY._serialized_start=113 - _STRUCT_FIELDSENTRY._serialized_end=182 - _VALUE._serialized_start=185 - _VALUE._serialized_end=419 - _LISTVALUE._serialized_start=421 - _LISTVALUE._serialized_end=472 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py deleted file mode 100644 index fdcf8cf06c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py +++ /dev/null @@ -1,194 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""A database of Python protocol buffer generated symbols. - -SymbolDatabase is the MessageFactory for messages generated at compile time, -and makes it easy to create new instances of a registered type, given only the -type's protocol buffer symbol name. - -Example usage:: - - db = symbol_database.SymbolDatabase() - - # Register symbols of interest, from one or multiple files. 
- db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) - db.RegisterMessage(my_proto_pb2.MyMessage) - db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) - - # The database can be used as a MessageFactory, to generate types based on - # their name: - types = db.GetMessages(['my_proto.proto']) - my_message_instance = types['MyMessage']() - - # The database's underlying descriptor pool can be queried, so it's not - # necessary to know a type's filename to be able to generate it: - filename = db.pool.FindFileContainingSymbol('MyMessage') - my_message_instance = db.GetMessages([filename])['MyMessage']() - - # This functionality is also provided directly via a convenience method: - my_message_instance = db.GetSymbol('MyMessage')() -""" - - -from google.protobuf.internal import api_implementation -from google.protobuf import descriptor_pool -from google.protobuf import message_factory - - -class SymbolDatabase(message_factory.MessageFactory): - """A database of Python generated symbols.""" - - def RegisterMessage(self, message): - """Registers the given message type in the local database. - - Calls to GetSymbol() and GetMessages() will return messages registered here. - - Args: - message: A :class:`google.protobuf.message.Message` subclass (or - instance); its descriptor will be registered. - - Returns: - The provided message. - """ - - desc = message.DESCRIPTOR - self._classes[desc] = message - self.RegisterMessageDescriptor(desc) - return message - - def RegisterMessageDescriptor(self, message_descriptor): - """Registers the given message descriptor in the local database. - - Args: - message_descriptor (Descriptor): the message descriptor to add. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddDescriptor(message_descriptor) - - def RegisterEnumDescriptor(self, enum_descriptor): - """Registers the given enum descriptor in the local database. - - Args: - enum_descriptor (EnumDescriptor): The enum descriptor to register. - - Returns: - EnumDescriptor: The provided descriptor. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddEnumDescriptor(enum_descriptor) - return enum_descriptor - - def RegisterServiceDescriptor(self, service_descriptor): - """Registers the given service descriptor in the local database. - - Args: - service_descriptor (ServiceDescriptor): the service descriptor to - register. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._AddServiceDescriptor(service_descriptor) - - def RegisterFileDescriptor(self, file_descriptor): - """Registers the given file descriptor in the local database. - - Args: - file_descriptor (FileDescriptor): The file descriptor to register. - """ - if api_implementation.Type() == 'python': - # pylint: disable=protected-access - self.pool._InternalAddFileDescriptor(file_descriptor) - - def GetSymbol(self, symbol): - """Tries to find a symbol in the local database. - - Currently, this method only returns message.Message instances, however, if - may be extended in future to support other symbol types. - - Args: - symbol (str): a protocol buffer symbol. - - Returns: - A Python class corresponding to the symbol. - - Raises: - KeyError: if the symbol could not be found. - """ - - return self._classes[self.pool.FindMessageTypeByName(symbol)] - - def GetMessages(self, files): - # TODO(amauryfa): Fix the differences with MessageFactory. - """Gets all registered messages from a specified file. 
- - Only messages already created and registered will be returned; (this is the - case for imported _pb2 modules) - But unlike MessageFactory, this version also returns already defined nested - messages, but does not register any message extensions. - - Args: - files (list[str]): The file names to extract messages from. - - Returns: - A dictionary mapping proto names to the message classes. - - Raises: - KeyError: if a file could not be found. - """ - - def _GetAllMessages(desc): - """Walk a message Descriptor and recursively yields all message names.""" - yield desc - for msg_desc in desc.nested_types: - for nested_desc in _GetAllMessages(msg_desc): - yield nested_desc - - result = {} - for file_name in files: - file_desc = self.pool.FindFileByName(file_name) - for msg_desc in file_desc.message_types_by_name.values(): - for desc in _GetAllMessages(msg_desc): - try: - result[desc.full_name] = self._classes[desc] - except KeyError: - # This descriptor has no registered class, skip it. - pass - return result - - -_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) - - -def Default(): - """Returns the default SymbolDatabase.""" - return _DEFAULT diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py deleted file mode 100644 index 759cf11f62..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py +++ /dev/null @@ -1,110 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
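The vendored symbol_database module removed above mirrors the upstream google.protobuf.symbol_database API, so the usage pattern from its own docstring keeps working against the upstream package once this copy is gone. A minimal sketch of that documented pattern follows; my_proto_pb2 is the docstring's placeholder for any protoc-generated *_pb2 module (not a module in this repository), and, as the deleted code's api_implementation checks show, the explicit Register* calls are effectively no-ops outside the pure-Python descriptor implementation.

from google.protobuf import symbol_database

import my_proto_pb2  # placeholder: any protoc-generated *_pb2 module

# Create a private database and register the symbols of interest.
db = symbol_database.SymbolDatabase()
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
db.RegisterMessage(my_proto_pb2.MyMessage)
db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)

# The database doubles as a MessageFactory: resolve classes per .proto file...
types = db.GetMessages(['my_proto.proto'])
my_message_instance = types['MyMessage']()

# ...or look a class up directly by its protocol buffer symbol name.
my_message_instance = db.GetSymbol('MyMessage')()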
- -"""Encoding related utilities.""" -import re - -_cescape_chr_to_symbol_map = {} -_cescape_chr_to_symbol_map[9] = r'\t' # optional escape -_cescape_chr_to_symbol_map[10] = r'\n' # optional escape -_cescape_chr_to_symbol_map[13] = r'\r' # optional escape -_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape -_cescape_chr_to_symbol_map[39] = r"\'" # optional escape -_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape - -# Lookup table for unicode -_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] -for byte, string in _cescape_chr_to_symbol_map.items(): - _cescape_unicode_to_str[byte] = string - -# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) -_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + - [chr(i) for i in range(32, 127)] + - [r'\%03o' % i for i in range(127, 256)]) -for byte, string in _cescape_chr_to_symbol_map.items(): - _cescape_byte_to_str[byte] = string -del byte, string - - -def CEscape(text, as_utf8): - # type: (...) -> str - """Escape a bytes string for use in an text protocol buffer. - - Args: - text: A byte string to be escaped. - as_utf8: Specifies if result may contain non-ASCII characters. - In Python 3 this allows unescaped non-ASCII Unicode characters. - In Python 2 the return value will be valid UTF-8 rather than only ASCII. - Returns: - Escaped string (str). - """ - # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not - # satisfy our needs; they encodes unprintable characters using two-digit hex - # escapes whereas our C++ unescaping function allows hex escapes to be any - # length. So, "\0011".encode('string_escape') ends up being "\\x011", which - # will be decoded in C++ as a single-character string with char code 0x11. - text_is_unicode = isinstance(text, str) - if as_utf8 and text_is_unicode: - # We're already unicode, no processing beyond control char escapes. - return text.translate(_cescape_chr_to_symbol_map) - ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. - if as_utf8: - return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) - return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) - - -_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') - - -def CUnescape(text): - # type: (str) -> bytes - """Unescape a text string with C-style escape sequences to UTF-8 bytes. - - Args: - text: The data to parse in a str. - Returns: - A byte string. - """ - - def ReplaceHex(m): - # Only replace the match if the number of leading back slashes is odd. i.e. - # the slash itself is not escaped. - if len(m.group(1)) & 1: - return m.group(1) + 'x0' + m.group(2) - return m.group(0) - - # This is required because the 'string_escape' encoding doesn't - # allow single-digit hex escapes (like '\xf'). - result = _CUNESCAPE_HEX.sub(ReplaceHex, text) - - return (result.encode('utf-8') # Make it bytes to allow decode. - .decode('unicode_escape') - # Make it bytes again to return the proper type. - .encode('raw_unicode_escape')) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py deleted file mode 100644 index 412385c26f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py +++ /dev/null @@ -1,1795 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in text format. - -Simple usage example:: - - # Create a proto object and serialize it to a text proto string. - message = my_proto_pb2.MyMessage(foo='bar') - text_proto = text_format.MessageToString(message) - - # Parse a text proto string. - message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) -""" - -__author__ = 'kenton@google.com (Kenton Varda)' - -# TODO(b/129989314) Import thread contention leads to test failures. 
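The round trip in the module docstring above can be made self-contained by borrowing a well-known type instead of a project-specific *_pb2 module. A small sketch against the upstream google.protobuf package (struct_pb2 here refers to the upstream module, not the vendored copy being deleted in this diff):

from google.protobuf import struct_pb2, text_format

# Build a small message, render it as a text proto, then parse it back.
msg = struct_pb2.Struct()
msg.fields["foo"].string_value = "bar"

text_proto = text_format.MessageToString(msg)
parsed = text_format.Parse(text_proto, struct_pb2.Struct())
assert parsed == msg

# Merge() is the forgiving variant: repeated occurrences of a singular field
# keep the last value instead of raising ParseError.
text_format.Merge(text_proto, parsed)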
-import encodings.raw_unicode_escape # pylint: disable=unused-import -import encodings.unicode_escape # pylint: disable=unused-import -import io -import math -import re - -from google.protobuf.internal import decoder -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import text_encoding - -# pylint: disable=g-import-not-at-top -__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', - 'PrintFieldValue', 'Merge', 'MessageToBytes'] - -_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), - type_checkers.Int32ValueChecker(), - type_checkers.Uint64ValueChecker(), - type_checkers.Int64ValueChecker()) -_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) -_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) -_QUOTES = frozenset(("'", '"')) -_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' - - -class Error(Exception): - """Top-level module error for text_format.""" - - -class ParseError(Error): - """Thrown in case of text parsing or tokenizing error.""" - - def __init__(self, message=None, line=None, column=None): - if message is not None and line is not None: - loc = str(line) - if column is not None: - loc += ':{0}'.format(column) - message = '{0} : {1}'.format(loc, message) - if message is not None: - super(ParseError, self).__init__(message) - else: - super(ParseError, self).__init__() - self._line = line - self._column = column - - def GetLine(self): - return self._line - - def GetColumn(self): - return self._column - - -class TextWriter(object): - - def __init__(self, as_utf8): - self._writer = io.StringIO() - - def write(self, val): - return self._writer.write(val) - - def close(self): - return self._writer.close() - - def getvalue(self): - return self._writer.getvalue() - - -def MessageToString( - message, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - indent=0, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - # type: (...) -> str - """Convert protobuf message to text format. - - Double values can be formatted compactly with 15 digits of - precision (which is the most that IEEE 754 "double" can guarantee) - using double_format='.15g'. To ensure that converting to text and back to a - proto will result in an identical value, double_format='.17g' should be used. - - Args: - message: The protocol buffers message. - as_utf8: Return unescaped Unicode for non-ASCII characters. - In Python 3 actual Unicode characters may appear as is in strings. - In Python 2 the return value will be valid UTF-8 rather than only ASCII. - as_one_line: Don't introduce newlines between fields. - use_short_repeated_primitives: Use short repeated format for primitives. - pointy_brackets: If True, use angle brackets instead of curly braces for - nesting. - use_index_order: If True, fields of a proto message will be printed using - the order defined in source code instead of the field number, extensions - will be printed at the end of the message and their relative order is - determined by the extension number. By default, use the field number - order. - float_format (str): If set, use this to specify float field formatting - (per the "Format Specification Mini-Language"); otherwise, shortest float - that has same value in wire will be printed. Also affect double field - if double_format is not set but float_format is set. 
- double_format (str): If set, use this to specify double field formatting - (per the "Format Specification Mini-Language"); if it is not set but - float_format is set, use float_format. Otherwise, use ``str()`` - use_field_number: If True, print field numbers instead of names. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - indent (int): The initial indent level, in terms of spaces, for pretty - print. - message_formatter (function(message, indent, as_one_line) -> unicode|None): - Custom formatter for selected sub-messages (usually based on message - type). Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if the - field is a proto message. - - Returns: - str: A string of the text formatted protocol buffer message. - """ - out = TextWriter(as_utf8) - printer = _Printer( - out, - indent, - as_utf8, - as_one_line, - use_short_repeated_primitives, - pointy_brackets, - use_index_order, - float_format, - double_format, - use_field_number, - descriptor_pool, - message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - result = out.getvalue() - out.close() - if as_one_line: - return result.rstrip() - return result - - -def MessageToBytes(message, **kwargs): - # type: (...) -> bytes - """Convert protobuf message to encoded text format. See MessageToString.""" - text = MessageToString(message, **kwargs) - if isinstance(text, bytes): - return text - codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' - return text.encode(codec) - - -def _IsMapEntry(field): - return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def PrintMessage(message, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - printer = _Printer( - out=out, indent=indent, as_utf8=as_utf8, - as_one_line=as_one_line, - use_short_repeated_primitives=use_short_repeated_primitives, - pointy_brackets=pointy_brackets, - use_index_order=use_index_order, - float_format=float_format, - double_format=double_format, - use_field_number=use_field_number, - descriptor_pool=descriptor_pool, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - - -def PrintField(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Print a single field name/value pair.""" - printer = _Printer(out, indent, as_utf8, as_one_line, - use_short_repeated_primitives, pointy_brackets, - use_index_order, float_format, double_format, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintField(field, value) - - -def PrintFieldValue(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, 
- float_format=None, - double_format=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Print a single field value (not including name).""" - printer = _Printer(out, indent, as_utf8, as_one_line, - use_short_repeated_primitives, pointy_brackets, - use_index_order, float_format, double_format, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintFieldValue(field, value) - - -def _BuildMessageFromTypeName(type_name, descriptor_pool): - """Returns a protobuf message instance. - - Args: - type_name: Fully-qualified protobuf message type name string. - descriptor_pool: DescriptorPool instance. - - Returns: - A Message instance of type matching type_name, or None if the a Descriptor - wasn't found matching type_name. - """ - # pylint: disable=g-import-not-at-top - if descriptor_pool is None: - from google.protobuf import descriptor_pool as pool_mod - descriptor_pool = pool_mod.Default() - from google.protobuf import symbol_database - database = symbol_database.Default() - try: - message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) - except KeyError: - return None - message_type = database.GetPrototype(message_descriptor) - return message_type() - - -# These values must match WireType enum in google/protobuf/wire_format.h. -WIRETYPE_LENGTH_DELIMITED = 2 -WIRETYPE_START_GROUP = 3 - - -class _Printer(object): - """Text format printer for protocol message.""" - - def __init__( - self, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Initialize the Printer. - - Double values can be formatted compactly with 15 digits of precision - (which is the most that IEEE 754 "double" can guarantee) using - double_format='.15g'. To ensure that converting to text and back to a proto - will result in an identical value, double_format='.17g' should be used. - - Args: - out: To record the text format result. - indent: The initial indent level for pretty print. - as_utf8: Return unescaped Unicode for non-ASCII characters. - In Python 3 actual Unicode characters may appear as is in strings. - In Python 2 the return value will be valid UTF-8 rather than ASCII. - as_one_line: Don't introduce newlines between fields. - use_short_repeated_primitives: Use short repeated format for primitives. - pointy_brackets: If True, use angle brackets instead of curly braces for - nesting. - use_index_order: If True, print fields of a proto message using the order - defined in source code instead of the field number. By default, use the - field number order. - float_format: If set, use this to specify float field formatting - (per the "Format Specification Mini-Language"); otherwise, shortest - float that has same value in wire will be printed. Also affect double - field if double_format is not set but float_format is set. - double_format: If set, use this to specify double field formatting - (per the "Format Specification Mini-Language"); if it is not set but - float_format is set, use float_format. Otherwise, str() is used. - use_field_number: If True, print field numbers instead of names. - descriptor_pool: A DescriptorPool used to resolve Any types. 
- message_formatter: A function(message, indent, as_one_line): unicode|None - to custom format selected sub-messages (usually based on message type). - Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if - the field is a proto message. - """ - self.out = out - self.indent = indent - self.as_utf8 = as_utf8 - self.as_one_line = as_one_line - self.use_short_repeated_primitives = use_short_repeated_primitives - self.pointy_brackets = pointy_brackets - self.use_index_order = use_index_order - self.float_format = float_format - if double_format is not None: - self.double_format = double_format - else: - self.double_format = float_format - self.use_field_number = use_field_number - self.descriptor_pool = descriptor_pool - self.message_formatter = message_formatter - self.print_unknown_fields = print_unknown_fields - self.force_colon = force_colon - - def _TryPrintAsAnyMessage(self, message): - """Serializes if message is a google.protobuf.Any field.""" - if '/' not in message.type_url: - return False - packed_message = _BuildMessageFromTypeName(message.TypeName(), - self.descriptor_pool) - if packed_message: - packed_message.MergeFromString(message.value) - colon = ':' if self.force_colon else '' - self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) - self._PrintMessageFieldValue(packed_message) - self.out.write(' ' if self.as_one_line else '\n') - return True - else: - return False - - def _TryCustomFormatMessage(self, message): - formatted = self.message_formatter(message, self.indent, self.as_one_line) - if formatted is None: - return False - - out = self.out - out.write(' ' * self.indent) - out.write(formatted) - out.write(' ' if self.as_one_line else '\n') - return True - - def PrintMessage(self, message): - """Convert protobuf message to text format. - - Args: - message: The protocol buffers message. - """ - if self.message_formatter and self._TryCustomFormatMessage(message): - return - if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and - self._TryPrintAsAnyMessage(message)): - return - fields = message.ListFields() - if self.use_index_order: - fields.sort( - key=lambda x: x[0].number if x[0].is_extension else x[0].index) - for field, value in fields: - if _IsMapEntry(field): - for key in sorted(value): - # This is slow for maps with submessage entries because it copies the - # entire tree. Unfortunately this would take significant refactoring - # of this file to work around. - # - # TODO(haberman): refactor and optimize if this becomes an issue. 
- entry_submsg = value.GetEntryClass()(key=key, value=value[key]) - self.PrintField(field, entry_submsg) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if (self.use_short_repeated_primitives - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): - self._PrintShortRepeatedPrimitivesValue(field, value) - else: - for element in value: - self.PrintField(field, element) - else: - self.PrintField(field, value) - - if self.print_unknown_fields: - self._PrintUnknownFields(message.UnknownFields()) - - def _PrintUnknownFields(self, unknown_fields): - """Print unknown fields.""" - out = self.out - for field in unknown_fields: - out.write(' ' * self.indent) - out.write(str(field.field_number)) - if field.wire_type == WIRETYPE_START_GROUP: - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(field.data) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: - try: - # If this field is parseable as a Message, it is probably - # an embedded message. - # pylint: disable=protected-access - (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( - memoryview(field.data), 0, len(field.data)) - except Exception: # pylint: disable=broad-except - pos = 0 - - if pos == len(field.data): - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(embedded_unknown_message) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - else: - # A string or bytes field. self.as_utf8 may not work. - out.write(': \"') - out.write(text_encoding.CEscape(field.data, False)) - out.write('\" ' if self.as_one_line else '\"\n') - else: - # varint, fixed32, fixed64 - out.write(': ') - out.write(str(field.data)) - out.write(' ' if self.as_one_line else '\n') - - def _PrintFieldName(self, field): - """Print field name.""" - out = self.out - out.write(' ' * self.indent) - if self.use_field_number: - out.write(str(field.number)) - else: - if field.is_extension: - out.write('[') - if (field.containing_type.GetOptions().message_set_wire_format and - field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): - out.write(field.message_type.full_name) - else: - out.write(field.full_name) - out.write(']') - elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: - # For groups, use the capitalized name. - out.write(field.message_type.name) - else: - out.write(field.name) - - if (self.force_colon or - field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): - # The colon is optional in this case, but our cross-language golden files - # don't include it. Here, the colon is only included if force_colon is - # set to True - out.write(':') - - def PrintField(self, field, value): - """Print a single field name/value pair.""" - self._PrintFieldName(field) - self.out.write(' ') - self.PrintFieldValue(field, value) - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintShortRepeatedPrimitivesValue(self, field, value): - """"Prints short repeated primitives value.""" - # Note: this is called only when value has at least one element. 
- self._PrintFieldName(field) - self.out.write(' [') - for i in range(len(value) - 1): - self.PrintFieldValue(field, value[i]) - self.out.write(', ') - self.PrintFieldValue(field, value[-1]) - self.out.write(']') - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintMessageFieldValue(self, value): - if self.pointy_brackets: - openb = '<' - closeb = '>' - else: - openb = '{' - closeb = '}' - - if self.as_one_line: - self.out.write('%s ' % openb) - self.PrintMessage(value) - self.out.write(closeb) - else: - self.out.write('%s\n' % openb) - self.indent += 2 - self.PrintMessage(value) - self.indent -= 2 - self.out.write(' ' * self.indent + closeb) - - def PrintFieldValue(self, field, value): - """Print a single field value (not including name). - - For repeated fields, the value should be a single element. - - Args: - field: The descriptor of the field to be printed. - value: The value of the field. - """ - out = self.out - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self._PrintMessageFieldValue(value) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - enum_value = field.enum_type.values_by_number.get(value, None) - if enum_value is not None: - out.write(enum_value.name) - else: - out.write(str(value)) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - out.write('\"') - if isinstance(value, str) and not self.as_utf8: - out_value = value.encode('utf-8') - else: - out_value = value - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - # We always need to escape all binary data in TYPE_BYTES fields. - out_as_utf8 = False - else: - out_as_utf8 = self.as_utf8 - out.write(text_encoding.CEscape(out_value, out_as_utf8)) - out.write('\"') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - if value: - out.write('true') - else: - out.write('false') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - if self.float_format is not None: - out.write('{1:{0}}'.format(self.float_format, value)) - else: - if math.isnan(value): - out.write(str(value)) - else: - out.write(str(type_checkers.ToShortestFloat(value))) - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and - self.double_format is not None): - out.write('{1:{0}}'.format(self.double_format, value)) - else: - out.write(str(value)) - - -def Parse(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - NOTE: for historical reasons this function does not clear the input - message. This is different from what the binary msg.ParseFrom(...) does. - If text contains a field already set in message, the value is appended if the - field is repeated. Otherwise, an error is raised. - - Example:: - - a = MyProto() - a.repeated_field.append('test') - b = MyProto() - - # Repeated fields are combined - text_format.Parse(repr(a), b) - text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] - - # Non-repeated fields cannot be overwritten - a.singular_field = 1 - b.singular_field = 2 - text_format.Parse(repr(a), b) # ParseError - - # Binary version: - b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" - - Caller is responsible for clearing the message as needed. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def Merge(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - Like Parse(), but allows repeated values for a non-repeated field, and uses - the last one. This means any non-repeated, top-level fields specified in text - replace those in the message. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return MergeLines( - text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def ParseLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Parse() for caveats. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.ParseLines(lines, message) - - -def MergeLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Merge() for more details. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.MergeLines(lines, message) - - -class _Parser(object): - """Text format parser for protocol message.""" - - def __init__(self, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - self.allow_unknown_extension = allow_unknown_extension - self.allow_field_number = allow_field_number - self.descriptor_pool = descriptor_pool - self.allow_unknown_field = allow_unknown_field - - def ParseLines(self, lines, message): - """Parses a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = False - self._ParseOrMerge(lines, message) - return message - - def MergeLines(self, lines, message): - """Merges a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = True - self._ParseOrMerge(lines, message) - return message - - def _ParseOrMerge(self, lines, message): - """Converts a text representation of a protocol message into a message. - - Args: - lines: Lines of a message's text representation. - message: A protocol buffer message to merge into. - - Raises: - ParseError: On text parsing problems. - """ - # Tokenize expects native str lines. - str_lines = ( - line if isinstance(line, str) else line.decode('utf-8') - for line in lines) - tokenizer = Tokenizer(str_lines) - while not tokenizer.AtEnd(): - self._MergeField(tokenizer, message) - - def _MergeField(self, tokenizer, message): - """Merges a single protocol message field into a message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - message: A protocol message to record the data. - - Raises: - ParseError: In case of text parsing problems. - """ - message_descriptor = message.DESCRIPTOR - if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and - tokenizer.TryConsume('[')): - type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) - tokenizer.Consume(']') - tokenizer.TryConsume(':') - if tokenizer.TryConsume('<'): - expanded_any_end_token = '>' - else: - tokenizer.Consume('{') - expanded_any_end_token = '}' - expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, - self.descriptor_pool) - if not expanded_any_sub_message: - raise ParseError('Type %s not found in descriptor pool' % - packed_type_name) - while not tokenizer.TryConsume(expanded_any_end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% - (expanded_any_end_token,)) - self._MergeField(tokenizer, expanded_any_sub_message) - deterministic = False - - message.Pack(expanded_any_sub_message, - type_url_prefix=type_url_prefix, - deterministic=deterministic) - return - - if tokenizer.TryConsume('['): - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - name = '.'.join(name) - - if not message_descriptor.is_extendable: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" does not have extensions.' % - message_descriptor.full_name) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(name) - # pylint: enable=protected-access - - - if not field: - if self.allow_unknown_extension: - field = None - else: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" not registered. ' - 'Did you import the _pb2 module which defines it? ' - 'If you are trying to place the extension in the MessageSet ' - 'field of another message that is in an Any or MessageSet field, ' - 'that message\'s _pb2 module must be imported as well' % name) - elif message_descriptor != field.containing_type: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" does not extend message type "%s".' % - (name, message_descriptor.full_name)) - - tokenizer.Consume(']') - - else: - name = tokenizer.ConsumeIdentifierOrNumber() - if self.allow_field_number and name.isdigit(): - number = ParseInteger(name, True, True) - field = message_descriptor.fields_by_number.get(number, None) - if not field and message_descriptor.is_extendable: - field = message.Extensions._FindExtensionByNumber(number) - else: - field = message_descriptor.fields_by_name.get(name, None) - - # Group names are expected to be capitalized as they appear in the - # .proto file, which actually matches their type names, not their field - # names. - if not field: - field = message_descriptor.fields_by_name.get(name.lower(), None) - if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: - field = None - - if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and - field.message_type.name != name): - field = None - - if not field and not self.allow_unknown_field: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" has no field named "%s".' % - (message_descriptor.full_name, name)) - - if field: - if not self._allow_multiple_scalars and field.containing_oneof: - # Check if there's a different field set in this oneof. - # Note that we ignore the case if the same field was set before, and we - # apply _allow_multiple_scalars to non-scalar fields as well. - which_oneof = message.WhichOneof(field.containing_oneof.name) - if which_oneof is not None and which_oneof != field.name: - raise tokenizer.ParseErrorPreviousToken( - 'Field "%s" is specified along with field "%s", another member ' - 'of oneof "%s" for message type "%s".' % - (field.name, which_oneof, field.containing_oneof.name, - message_descriptor.full_name)) - - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - tokenizer.TryConsume(':') - merger = self._MergeMessageField - else: - tokenizer.Consume(':') - merger = self._MergeScalarField - - if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and - tokenizer.TryConsume('[')): - # Short repeated format, e.g. 
"foo: [1, 2, 3]" - if not tokenizer.TryConsume(']'): - while True: - merger(tokenizer, message, field) - if tokenizer.TryConsume(']'): - break - tokenizer.Consume(',') - - else: - merger(tokenizer, message, field) - - else: # Proto field is unknown. - assert (self.allow_unknown_extension or self.allow_unknown_field) - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - - def _ConsumeAnyTypeUrl(self, tokenizer): - """Consumes a google.protobuf.Any type URL and returns the type name.""" - # Consume "type.googleapis.com/". - prefix = [tokenizer.ConsumeIdentifier()] - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('/') - # Consume the fully-qualified type name. - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - return '.'.join(prefix), '.'.join(name) - - def _MergeMessageField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: The message of which field is a member. - field: The descriptor of the field to be merged. - - Raises: - ParseError: In case of text parsing problems. - """ - is_map_entry = _IsMapEntry(field) - - if tokenizer.TryConsume('<'): - end_token = '>' - else: - tokenizer.Consume('{') - end_token = '}' - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - sub_message = message.Extensions[field].add() - elif is_map_entry: - sub_message = getattr(message, field.name).GetEntryClass()() - else: - sub_message = getattr(message, field.name).add() - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - sub_message = message.Extensions[field] - else: - # Also apply _allow_multiple_scalars to message field. - # TODO(jieluo): Change to _allow_singular_overwrites. - if (not self._allow_multiple_scalars and - message.HasField(field.name)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - sub_message = getattr(message, field.name) - sub_message.SetInParent() - - while not tokenizer.TryConsume(end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) - self._MergeField(tokenizer, sub_message) - - if is_map_entry: - value_cpptype = field.message_type.fields_by_name['value'].cpp_type - if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - value = getattr(message, field.name)[sub_message.key] - value.CopyFrom(sub_message.value) - else: - getattr(message, field.name)[sub_message.key] = sub_message.value - - @staticmethod - def _IsProto3Syntax(message): - message_descriptor = message.DESCRIPTOR - return (hasattr(message_descriptor, 'syntax') and - message_descriptor.syntax == 'proto3') - - def _MergeScalarField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: A protocol message to record the data. - field: The descriptor of the field to be merged. 
- - Raises: - ParseError: In case of text parsing problems. - RuntimeError: On runtime errors. - """ - _ = self.allow_unknown_extension - value = None - - if field.type in (descriptor.FieldDescriptor.TYPE_INT32, - descriptor.FieldDescriptor.TYPE_SINT32, - descriptor.FieldDescriptor.TYPE_SFIXED32): - value = _ConsumeInt32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, - descriptor.FieldDescriptor.TYPE_SINT64, - descriptor.FieldDescriptor.TYPE_SFIXED64): - value = _ConsumeInt64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, - descriptor.FieldDescriptor.TYPE_FIXED32): - value = _ConsumeUint32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, - descriptor.FieldDescriptor.TYPE_FIXED64): - value = _ConsumeUint64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, - descriptor.FieldDescriptor.TYPE_DOUBLE): - value = tokenizer.ConsumeFloat() - elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: - value = tokenizer.ConsumeBool() - elif field.type == descriptor.FieldDescriptor.TYPE_STRING: - value = tokenizer.ConsumeString() - elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: - value = tokenizer.ConsumeByteString() - elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: - value = tokenizer.ConsumeEnum(field) - else: - raise RuntimeError('Unknown field type %d' % field.type) - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - message.Extensions[field].append(value) - else: - getattr(message, field.name).append(value) - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - not self._IsProto3Syntax(message) and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - else: - message.Extensions[field] = value - else: - duplicate_error = False - if not self._allow_multiple_scalars: - if self._IsProto3Syntax(message): - # Proto3 doesn't represent presence so we try best effort to check - # multiple scalars by compare to default values. - duplicate_error = bool(getattr(message, field.name)) - else: - duplicate_error = message.HasField(field.name) - - if duplicate_error: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - else: - setattr(message, field.name, value) - - -def _SkipFieldContents(tokenizer): - """Skips over contents (value or message) of a field. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - # Try to guess the type of this field. - # If this field is not a message, there should be a ":" between the - # field name and the field value and also the field value should not - # start with "{" or "<" which indicates the beginning of a message body. - # If there is no ":" or there is a "{" or "<" after ":", this field has - # to be a message or the input is ill-formed. - if tokenizer.TryConsume(':') and not tokenizer.LookingAt( - '{') and not tokenizer.LookingAt('<'): - _SkipFieldValue(tokenizer) - else: - _SkipFieldMessage(tokenizer) - - -def _SkipField(tokenizer): - """Skips over a complete field (name and value/message). - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - if tokenizer.TryConsume('['): - # Consume extension name. 
- tokenizer.ConsumeIdentifier() - while tokenizer.TryConsume('.'): - tokenizer.ConsumeIdentifier() - tokenizer.Consume(']') - else: - tokenizer.ConsumeIdentifierOrNumber() - - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - -def _SkipFieldMessage(tokenizer): - """Skips over a field message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - - if tokenizer.TryConsume('<'): - delimiter = '>' - else: - tokenizer.Consume('{') - delimiter = '}' - - while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): - _SkipField(tokenizer) - - tokenizer.Consume(delimiter) - - -def _SkipFieldValue(tokenizer): - """Skips over a field value. - - Args: - tokenizer: A tokenizer to parse the field name and values. - - Raises: - ParseError: In case an invalid field value is found. - """ - # String/bytes tokens can come in multiple adjacent string literals. - # If we can consume one, consume as many as we can. - if tokenizer.TryConsumeByteString(): - while tokenizer.TryConsumeByteString(): - pass - return - - if (not tokenizer.TryConsumeIdentifier() and - not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and - not tokenizer.TryConsumeFloat()): - raise ParseError('Invalid field value: ' + tokenizer.token) - - -class Tokenizer(object): - """Protocol buffer text representation tokenizer. - - This class handles the lower level string parsing by splitting it into - meaningful tokens. - - It was directly ported from the Java protocol buffer API. - """ - - _WHITESPACE = re.compile(r'\s+') - _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) - _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) - _TOKEN = re.compile('|'.join([ - r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier - r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number - ] + [ # quoted str for each quote mark - # Avoid backtracking! https://stackoverflow.com/a/844267 - r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) - for mark in _QUOTES - ])) - - _IDENTIFIER = re.compile(r'[^\d\W]\w*') - _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') - - def __init__(self, lines, skip_comments=True): - self._position = 0 - self._line = -1 - self._column = 0 - self._token_start = None - self.token = '' - self._lines = iter(lines) - self._current_line = '' - self._previous_line = 0 - self._previous_column = 0 - self._more_lines = True - self._skip_comments = skip_comments - self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT - or self._WHITESPACE) - self._SkipWhitespace() - self.NextToken() - - def LookingAt(self, token): - return self.token == token - - def AtEnd(self): - """Checks the end of the text was reached. - - Returns: - True iff the end was reached. - """ - return not self.token - - def _PopLine(self): - while len(self._current_line) <= self._column: - try: - self._current_line = next(self._lines) - except StopIteration: - self._current_line = '' - self._more_lines = False - return - else: - self._line += 1 - self._column = 0 - - def _SkipWhitespace(self): - while True: - self._PopLine() - match = self._whitespace_pattern.match(self._current_line, self._column) - if not match: - break - length = len(match.group(0)) - self._column += length - - def TryConsume(self, token): - """Tries to consume a given piece of text. - - Args: - token: Text to consume. - - Returns: - True iff the text was consumed. 
- """ - if self.token == token: - self.NextToken() - return True - return False - - def Consume(self, token): - """Consumes a piece of text. - - Args: - token: Text to consume. - - Raises: - ParseError: If the text couldn't be consumed. - """ - if not self.TryConsume(token): - raise self.ParseError('Expected "%s".' % token) - - def ConsumeComment(self): - result = self.token - if not self._COMMENT.match(result): - raise self.ParseError('Expected comment.') - self.NextToken() - return result - - def ConsumeCommentOrTrailingComment(self): - """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" - - # Tokenizer initializes _previous_line and _previous_column to 0. As the - # tokenizer starts, it looks like there is a previous token on the line. - just_started = self._line == 0 and self._column == 0 - - before_parsing = self._previous_line - comment = self.ConsumeComment() - - # A trailing comment is a comment on the same line than the previous token. - trailing = (self._previous_line == before_parsing - and not just_started) - - return trailing, comment - - def TryConsumeIdentifier(self): - try: - self.ConsumeIdentifier() - return True - except ParseError: - return False - - def ConsumeIdentifier(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER.match(result): - raise self.ParseError('Expected identifier.') - self.NextToken() - return result - - def TryConsumeIdentifierOrNumber(self): - try: - self.ConsumeIdentifierOrNumber() - return True - except ParseError: - return False - - def ConsumeIdentifierOrNumber(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER_OR_NUMBER.match(result): - raise self.ParseError('Expected identifier or number, got %s.' % result) - self.NextToken() - return result - - def TryConsumeInteger(self): - try: - self.ConsumeInteger() - return True - except ParseError: - return False - - def ConsumeInteger(self): - """Consumes an integer number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer couldn't be consumed. - """ - try: - result = _ParseAbstractInteger(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeFloat(self): - try: - self.ConsumeFloat() - return True - except ParseError: - return False - - def ConsumeFloat(self): - """Consumes an floating point number. - - Returns: - The number parsed. - - Raises: - ParseError: If a floating point number couldn't be consumed. - """ - try: - result = ParseFloat(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeBool(self): - """Consumes a boolean value. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - try: - result = ParseBool(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeByteString(self): - try: - self.ConsumeByteString() - return True - except ParseError: - return False - - def ConsumeString(self): - """Consumes a string value. - - Returns: - The string parsed. - - Raises: - ParseError: If a string value couldn't be consumed. 
- """ - the_bytes = self.ConsumeByteString() - try: - return str(the_bytes, 'utf-8') - except UnicodeDecodeError as e: - raise self._StringParseError(e) - - def ConsumeByteString(self): - """Consumes a byte array value. - - Returns: - The array parsed (as a string). - - Raises: - ParseError: If a byte array value couldn't be consumed. - """ - the_list = [self._ConsumeSingleByteString()] - while self.token and self.token[0] in _QUOTES: - the_list.append(self._ConsumeSingleByteString()) - return b''.join(the_list) - - def _ConsumeSingleByteString(self): - """Consume one token of a string literal. - - String literals (whether bytes or text) can come in multiple adjacent - tokens which are automatically concatenated, like in C or Python. This - method only consumes one token. - - Returns: - The token parsed. - Raises: - ParseError: When the wrong format data is found. - """ - text = self.token - if len(text) < 1 or text[0] not in _QUOTES: - raise self.ParseError('Expected string but found: %r' % (text,)) - - if len(text) < 2 or text[-1] != text[0]: - raise self.ParseError('String missing ending quote: %r' % (text,)) - - try: - result = text_encoding.CUnescape(text[1:-1]) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeEnum(self, field): - try: - result = ParseEnum(field, self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ParseErrorPreviousToken(self, message): - """Creates and *returns* a ParseError for the previously read token. - - Args: - message: A message to set for the exception. - - Returns: - A ParseError instance. - """ - return ParseError(message, self._previous_line + 1, - self._previous_column + 1) - - def ParseError(self, message): - """Creates and *returns* a ParseError for the current token.""" - return ParseError('\'' + self._current_line + '\': ' + message, - self._line + 1, self._column + 1) - - def _StringParseError(self, e): - return self.ParseError('Couldn\'t parse string: ' + str(e)) - - def NextToken(self): - """Reads the next meaningful token.""" - self._previous_line = self._line - self._previous_column = self._column - - self._column += len(self.token) - self._SkipWhitespace() - - if not self._more_lines: - self.token = '' - return - - match = self._TOKEN.match(self._current_line, self._column) - if not match and not self._skip_comments: - match = self._COMMENT.match(self._current_line, self._column) - if match: - token = match.group(0) - self.token = token - else: - self.token = self._current_line[self._column] - -# Aliased so it can still be accessed by current visibility violators. -# TODO(dbarnett): Migrate violators to textformat_tokenizer. -_Tokenizer = Tokenizer # pylint: disable=invalid-name - - -def _ConsumeInt32(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) - - -def _ConsumeUint32(tokenizer): - """Consumes an unsigned 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 32bit integer couldn't be consumed. 
- """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) - - -def _TryConsumeInt64(tokenizer): - try: - _ConsumeInt64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeInt64(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) - - -def _TryConsumeUint64(tokenizer): - try: - _ConsumeUint64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeUint64(tokenizer): - """Consumes an unsigned 64bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 64bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) - - -def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): - """Consumes an integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer with given characteristics couldn't be consumed. - """ - try: - result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) - except ValueError as e: - raise tokenizer.ParseError(str(e)) - tokenizer.NextToken() - return result - - -def ParseInteger(text, is_signed=False, is_long=False): - """Parses an integer. - - Args: - text: The text to parse. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - result = _ParseAbstractInteger(text) - - # Check if the integer is sane. Exceptions handled by callers. - checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] - checker.CheckValue(result) - return result - - -def _ParseAbstractInteger(text): - """Parses an integer without checking size/signedness. - - Args: - text: The text to parse. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - orig_text = text - c_octal_match = re.match(r'(-?)0(\d+)$', text) - if c_octal_match: - # Python 3 no longer supports 0755 octal syntax without the 'o', so - # we always use the '0o' prefix for multi-digit numbers starting with 0. - text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) - try: - return int(text, 0) - except ValueError: - raise ValueError('Couldn\'t parse integer: %s' % orig_text) - - -def ParseFloat(text): - """Parse a floating point number. - - Args: - text: Text to parse. - - Returns: - The number parsed. - - Raises: - ValueError: If a floating point number couldn't be parsed. - """ - try: - # Assume Python compatible syntax. - return float(text) - except ValueError: - # Check alternative spellings. 
- if _FLOAT_INFINITY.match(text): - if text[0] == '-': - return float('-inf') - else: - return float('inf') - elif _FLOAT_NAN.match(text): - return float('nan') - else: - # assume '1.0f' format - try: - return float(text.rstrip('f')) - except ValueError: - raise ValueError('Couldn\'t parse float: %s' % text) - - -def ParseBool(text): - """Parse a boolean value. - - Args: - text: Text to parse. - - Returns: - Boolean values parsed - - Raises: - ValueError: If text is not a valid boolean. - """ - if text in ('true', 't', '1', 'True'): - return True - elif text in ('false', 'f', '0', 'False'): - return False - else: - raise ValueError('Expected "true" or "false".') - - -def ParseEnum(field, value): - """Parse an enum value. - - The value can be specified by a number (the enum value), or by - a string literal (the enum name). - - Args: - field: Enum field descriptor. - value: String value. - - Returns: - Enum value number. - - Raises: - ValueError: If the enum value could not be parsed. - """ - enum_descriptor = field.enum_type - try: - number = int(value, 0) - except ValueError: - # Identifier. - enum_value = enum_descriptor.values_by_name.get(value, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value named %s.' % - (enum_descriptor.full_name, value)) - else: - # Numeric value. - if hasattr(field.file, 'syntax'): - # Attribute is checked for compatibility. - if field.file.syntax == 'proto3': - # Proto3 accept numeric unknown enums. - return number - enum_value = enum_descriptor.values_by_number.get(number, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value with number %d.' % - (enum_descriptor.full_name, number)) - return enum_value.number diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py deleted file mode 100644 index 558d496941..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/timestamp.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _TIMESTAMP._serialized_start=52 - _TIMESTAMP._serialized_end=95 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py deleted file mode 100644 index 19903fb6b4..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/type.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SYNTAX._serialized_start=1413 - _SYNTAX._serialized_end=1459 - _TYPE._serialized_start=113 - _TYPE._serialized_end=328 - _FIELD._serialized_start=331 - _FIELD._serialized_end=1056 - _FIELD_KIND._serialized_start=610 - _FIELD_KIND._serialized_end=938 - _FIELD_CARDINALITY._serialized_start=940 - _FIELD_CARDINALITY._serialized_end=1056 - _ENUM._serialized_start=1059 - _ENUM._serialized_end=1265 - _ENUMVALUE._serialized_start=1267 - _ENUMVALUE._serialized_end=1350 - _OPTION._serialized_start=1352 - _OPTION._serialized_end=1411 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py deleted file mode 100644 index 66a5836c82..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/util/json_format.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) - - DESCRIPTOR._options = None - _TESTBOOLMAP_BOOLMAPENTRY._options = None - _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' - _ENUMVALUE._serialized_start=1607 - _ENUMVALUE._serialized_end=1657 - _TESTFLAGSANDSTRINGS._serialized_start=62 - _TESTFLAGSANDSTRINGS._serialized_end=199 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 - _TESTBASE64BYTEARRAYS._serialized_start=201 - _TESTBASE64BYTEARRAYS._serialized_end=234 - _TESTJAVASCRIPTJSON._serialized_start=236 - _TESTJAVASCRIPTJSON._serialized_end=307 - _TESTJAVASCRIPTORDERJSON1._serialized_start=309 - _TESTJAVASCRIPTORDERJSON1._serialized_end=390 - _TESTJAVASCRIPTORDERJSON2._serialized_start=393 - _TESTJAVASCRIPTORDERJSON2._serialized_end=530 - _TESTLARGEINT._serialized_start=532 - _TESTLARGEINT._serialized_end=568 - _TESTNUMBERS._serialized_start=571 - _TESTNUMBERS._serialized_end=731 - _TESTNUMBERS_MYTYPE._serialized_start=691 - _TESTNUMBERS_MYTYPE._serialized_end=731 - _TESTCAMELCASE._serialized_start=733 - _TESTCAMELCASE._serialized_end=817 - _TESTBOOLMAP._serialized_start=819 - _TESTBOOLMAP._serialized_end=943 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 - _TESTRECURSION._serialized_start=945 - _TESTRECURSION._serialized_end=1024 - _TESTSTRINGMAP._serialized_start=1027 - _TESTSTRINGMAP._serialized_end=1161 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 - _TESTSTRINGSERIALIZER._serialized_start=1164 - _TESTSTRINGSERIALIZER._serialized_end=1360 - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 - _TESTMESSAGEWITHEXTENSION._serialized_start=1362 - _TESTMESSAGEWITHEXTENSION._serialized_end=1398 - _TESTEXTENSION._serialized_start=1400 - _TESTEXTENSION._serialized_end=1522 - _TESTDEFAULTENUMVALUE._serialized_start=1524 - _TESTDEFAULTENUMVALUE._serialized_end=1605 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py deleted file mode 100644 index 5498deafa9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/util/json_format_proto3.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' - _TESTMAP_BOOLMAPENTRY._options = None - _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTMAP_INT32MAPENTRY._options = None - _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_INT64MAPENTRY._options = None - _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT32MAPENTRY._options = None - _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT64MAPENTRY._options = None - _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_STRINGMAPENTRY._options = None - _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_BOOLMAPENTRY._options = None - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT32MAPENTRY._options = None - _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT64MAPENTRY._options = None - _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_UINT32MAPENTRY._options = None - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' - 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_STRINGMAPENTRY._options = None - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_MAPMAPENTRY._options = None - _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTBOOLVALUE_BOOLMAPENTRY._options = None - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' - _ENUMTYPE._serialized_start=4849 - _ENUMTYPE._serialized_end=4877 - _MESSAGETYPE._serialized_start=277 - _MESSAGETYPE._serialized_end=305 - _TESTMESSAGE._serialized_start=308 - _TESTMESSAGE._serialized_end=968 - _TESTONEOF._serialized_start=971 - _TESTONEOF._serialized_end=1239 - _TESTMAP._serialized_start=1242 - _TESTMAP._serialized_end=1851 - _TESTMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTMAP_INT32MAPENTRY._serialized_start=1605 - _TESTMAP_INT32MAPENTRY._serialized_end=1652 - _TESTMAP_INT64MAPENTRY._serialized_start=1654 - _TESTMAP_INT64MAPENTRY._serialized_end=1701 - _TESTMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP._serialized_start=1854 - _TESTNESTEDMAP._serialized_end=2627 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 - _TESTSTRINGMAP._serialized_start=2629 - _TESTSTRINGMAP._serialized_end=2752 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 - _TESTWRAPPER._serialized_start=2755 - _TESTWRAPPER._serialized_end=3761 - _TESTTIMESTAMP._serialized_start=3763 - _TESTTIMESTAMP._serialized_end=3873 - _TESTDURATION._serialized_start=3875 - _TESTDURATION._serialized_end=3982 - _TESTFIELDMASK._serialized_start=3984 - _TESTFIELDMASK._serialized_end=4042 - _TESTSTRUCT._serialized_start=4044 - _TESTSTRUCT._serialized_end=4145 - _TESTANY._serialized_start=4147 - _TESTANY._serialized_end=4239 - _TESTVALUE._serialized_start=4241 - _TESTVALUE._serialized_end=4339 - _TESTLISTVALUE._serialized_start=4341 - _TESTLISTVALUE._serialized_end=4451 - _TESTBOOLVALUE._serialized_start=4454 - _TESTBOOLVALUE._serialized_end=4591 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 - _TESTCUSTOMJSONNAME._serialized_start=4593 - _TESTCUSTOMJSONNAME._serialized_end=4636 - _TESTEXTENSIONS._serialized_start=4638 - _TESTEXTENSIONS._serialized_end=4712 - _TESTENUMVALUE._serialized_start=4715 - _TESTENUMVALUE._serialized_end=4847 -# @@protoc_insertion_point(module_scope) diff --git 
a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py deleted file mode 100644 index e49eb4c15d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/wrappers.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _DOUBLEVALUE._serialized_start=51 - _DOUBLEVALUE._serialized_end=79 - _FLOATVALUE._serialized_start=81 - _FLOATVALUE._serialized_end=108 - _INT64VALUE._serialized_start=110 - _INT64VALUE._serialized_end=137 - _UINT64VALUE._serialized_start=139 - _UINT64VALUE._serialized_end=167 - _INT32VALUE._serialized_start=169 - _INT32VALUE._serialized_end=196 - _UINT32VALUE._serialized_start=198 - _UINT32VALUE._serialized_end=226 - _BOOLVALUE._serialized_start=228 - _BOOLVALUE._serialized_end=254 - _STRINGVALUE._serialized_start=256 - _STRINGVALUE._serialized_end=284 - _BYTESVALUE._serialized_start=286 - _BYTESVALUE._serialized_end=313 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/version.py b/server_addon/nuke/client/ayon_nuke/version.py deleted file mode 100644 index 2262afb410..0000000000 --- a/server_addon/nuke/client/ayon_nuke/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'nuke' version.""" -__version__ = "0.2.3" diff --git a/server_addon/nuke/package.py b/server_addon/nuke/package.py deleted file mode 100644 index 7347d21b35..0000000000 --- a/server_addon/nuke/package.py +++ /dev/null @@ -1,10 +0,0 @@ -name = "nuke" -title = "Nuke" -version = "0.2.3" - -client_dir = "ayon_nuke" - -ayon_required_addons = { - "core": ">0.3.2", -} -ayon_compatible_addons = {} diff --git a/server_addon/nuke/server/__init__.py b/server_addon/nuke/server/__init__.py deleted file mode 
100644 index 0806ea8e87..0000000000 --- a/server_addon/nuke/server/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Type, Any - -from ayon_server.addons import BaseServerAddon - -from .settings import ( - NukeSettings, - DEFAULT_VALUES, - convert_settings_overrides -) - - -class NukeAddon(BaseServerAddon): - settings_model: Type[NukeSettings] = NukeSettings - - async def get_default_settings(self): - settings_model_cls = self.get_settings_model() - return settings_model_cls(**DEFAULT_VALUES) - - async def convert_settings_overrides( - self, - source_version: str, - overrides: dict[str, Any], - ) -> dict[str, Any]: - convert_settings_overrides(source_version, overrides) - # Use super conversion - return await super().convert_settings_overrides( - source_version, overrides) diff --git a/server_addon/nuke/server/settings/__init__.py b/server_addon/nuke/server/settings/__init__.py deleted file mode 100644 index da79b947f7..0000000000 --- a/server_addon/nuke/server/settings/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from .main import ( - NukeSettings, - DEFAULT_VALUES, -) -from .conversion import convert_settings_overrides - - -__all__ = ( - "NukeSettings", - "DEFAULT_VALUES", - - "convert_settings_overrides", -) diff --git a/server_addon/nuke/server/settings/common.py b/server_addon/nuke/server/settings/common.py deleted file mode 100644 index 2ddbc3ca26..0000000000 --- a/server_addon/nuke/server/settings/common.py +++ /dev/null @@ -1,195 +0,0 @@ -import json -from ayon_server.exceptions import BadRequestException -from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ( - ColorRGBA_float, - ColorRGB_uint8 -) - - -def validate_json_dict(value): - if not value.strip(): - return "{}" - try: - converted_value = json.loads(value) - success = isinstance(converted_value, dict) - except json.JSONDecodeError: - success = False - - if not success: - raise BadRequestException( - "Environment's can't be parsed as json object" - ) - return value - - -class Vector2d(BaseSettingsModel): - _layout = "compact" - - x: float = SettingsField(1.0, title="X") - y: float = SettingsField(1.0, title="Y") - - -class Vector3d(BaseSettingsModel): - _layout = "compact" - - x: float = SettingsField(1.0, title="X") - y: float = SettingsField(1.0, title="Y") - z: float = SettingsField(1.0, title="Z") - - -class Box(BaseSettingsModel): - _layout = "compact" - - x: float = SettingsField(1.0, title="X") - y: float = SettingsField(1.0, title="Y") - r: float = SettingsField(1.0, title="R") - t: float = SettingsField(1.0, title="T") - - -def formatable_knob_type_enum(): - return [ - {"value": "text", "label": "Text"}, - {"value": "number", "label": "Number"}, - {"value": "decimal_number", "label": "Decimal number"}, - {"value": "2d_vector", "label": "2D vector"}, - # "3D vector" - ] - - -class Formatable(BaseSettingsModel): - _layout = "compact" - - template: str = SettingsField( - "", - placeholder="""{{key}} or {{key}};{{key}}""", - title="Template" - ) - to_type: str = SettingsField( - "Text", - title="To Knob type", - enum_resolver=formatable_knob_type_enum, - ) - - -knob_types_enum = [ - {"value": "text", "label": "Text"}, - {"value": "formatable", "label": "Formate from template"}, - {"value": "color_gui", "label": "Color GUI"}, - {"value": "boolean", "label": "Boolean"}, - {"value": "number", "label": "Number"}, - {"value": "decimal_number", "label": "Decimal number"}, - {"value": "vector_2d", "label": "2D vector"}, - {"value": "vector_3d", "label": "3D vector"}, - 
{"value": "color", "label": "Color"}, - {"value": "box", "label": "Box"}, - {"value": "expression", "label": "Expression"} -] - - -class KnobModel(BaseSettingsModel): - _layout = "expanded" - - type: str = SettingsField( - title="Type", - description="Switch between different knob types", - enum_resolver=lambda: knob_types_enum, - conditionalEnum=True - ) - - name: str = SettingsField( - title="Name", - placeholder="Name" - ) - text: str = SettingsField("", title="Value") - color_gui: ColorRGB_uint8 = SettingsField( - (0, 0, 255), - title="RGB Uint8", - ) - boolean: bool = SettingsField(False, title="Value") - number: int = SettingsField(0, title="Value") - decimal_number: float = SettingsField(0.0, title="Value") - vector_2d: Vector2d = SettingsField( - default_factory=Vector2d, - title="Value" - ) - vector_3d: Vector3d = SettingsField( - default_factory=Vector3d, - title="Value" - ) - color: ColorRGBA_float = SettingsField( - (0.0, 0.0, 1.0, 1.0), - title="RGBA Float" - ) - box: Box = SettingsField( - default_factory=Box, - title="Value" - ) - formatable: Formatable = SettingsField( - default_factory=Formatable, - title="Formatable" - ) - expression: str = SettingsField( - "", - title="Expression" - ) - - -colorspace_types_enum = [ - {"value": "colorspace", "label": "Use Colorspace"}, - {"value": "display_view", "label": "Use Display & View"}, -] - - -class DisplayAndViewProfileModel(BaseSettingsModel): - _layout = "expanded" - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class ColorspaceConfigurationModel(BaseSettingsModel): - _isGroup: bool = True - - enabled: bool = SettingsField( - False, - title="Enabled", - description=( - "Enable baking target (colorspace or display/view)." - ), - ) - - type: str = SettingsField( - "colorspace", - title="Target baking type", - description="Switch between different knob types", - enum_resolver=lambda: colorspace_types_enum, - conditionalEnum=True, - ) - - colorspace: str = SettingsField( - "", - title="Colorspace", - description=( - "What colorspace name to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - display_view: DisplayAndViewProfileModel = SettingsField( - title="Display & View", - description="What display & view to use", - default_factory=DisplayAndViewProfileModel, - ) diff --git a/server_addon/nuke/server/settings/conversion.py b/server_addon/nuke/server/settings/conversion.py deleted file mode 100644 index 2e9e07e354..0000000000 --- a/server_addon/nuke/server/settings/conversion.py +++ /dev/null @@ -1,143 +0,0 @@ -import re -from typing import Any - - -def _get_viewer_config_from_string(input_string): - """Convert string to display and viewer string - - Args: - input_string (str): string with viewer - - Raises: - IndexError: if more then one slash in input string - IndexError: if missing closing bracket - - Returns: - tuple[str]: display, viewer - """ - display = None - viewer = input_string - # check if () or / or \ in name - if "/" in viewer: - split = viewer.split("/") - - # rise if more then one column - if len(split) > 2: - raise IndexError( - "Viewer Input string is not correct. " - f"More then two `/` slashes! 
{input_string}" - ) - - viewer = split[1] - display = split[0] - elif "(" in viewer: - pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" - result_ = re.findall(pattern, viewer) - try: - result_ = result_.pop() - display = str(result_[1]).rstrip() - viewer = str(result_[0]).rstrip() - except IndexError as e: - raise IndexError( - "Viewer Input string is not correct. " - f"Missing bracket! {input_string}" - ) from e - - return (display, viewer) - - -def _convert_imageio_baking_0_2_3(overrides): - if "baking" not in overrides: - return - - baking_view_process = overrides["baking"].get("viewerProcess") - - if baking_view_process is None: - return - - display, view = _get_viewer_config_from_string(baking_view_process) - - overrides["baking_target"] = { - "enabled": True, - "type": "display_view", - "display_view": { - "display": display, - "view": view, - }, - } - - -def _convert_viewers_0_2_3(overrides): - if "viewer" not in overrides: - return - - viewer = overrides["viewer"] - - if "viewerProcess" in viewer: - viewer_process = viewer["viewerProcess"] - display, view = _get_viewer_config_from_string(viewer_process) - viewer.update({ - "display": display, - "view": view, - }) - if "output_transform" in viewer: - output_transform = viewer["output_transform"] - display, view = _get_viewer_config_from_string(output_transform) - overrides["monitor"] = { - "display": display, - "view": view, - } - - -def _convert_imageio_configs_0_2_3(overrides): - """Image IO settings had changed. - - 0.2.2. is the latest version using the old way. - """ - if "imageio" not in overrides: - return - - imageio_overrides = overrides["imageio"] - - _convert_imageio_baking_0_2_3(imageio_overrides) - _convert_viewers_0_2_3(imageio_overrides) - - -def _convert_extract_intermediate_files_0_2_3(publish_overrides): - """Extract intermediate files settings had changed. - - 0.2.2. is the latest version using the old way. 
- """ - # override can be either `display/view` or `view (display)` - if "ExtractReviewIntermediates" in publish_overrides: - extract_review_intermediates = publish_overrides[ - "ExtractReviewIntermediates"] - - for output in extract_review_intermediates.get("outputs", []): - if viewer_process_override := output.get("viewer_process_override"): - display, view = _get_viewer_config_from_string( - viewer_process_override) - - output["colorspace_override"] = { - "enabled": True, - "type": "display_view", - "display_view": { - "display": display, - "view": view, - }, - } - - -def _convert_publish_plugins(overrides): - if "publish" not in overrides: - return - _convert_extract_intermediate_files_0_2_3(overrides["publish"]) - - -def convert_settings_overrides( - source_version: str, - overrides: dict[str, Any], -) -> dict[str, Any]: - _convert_imageio_configs_0_2_3(overrides) - _convert_publish_plugins(overrides) - return overrides diff --git a/server_addon/nuke/server/settings/create_plugins.py b/server_addon/nuke/server/settings/create_plugins.py deleted file mode 100644 index e4a0f9c938..0000000000 --- a/server_addon/nuke/server/settings/create_plugins.py +++ /dev/null @@ -1,225 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names -) -from .common import KnobModel - - -def instance_attributes_enum(): - """Return create write instance attributes.""" - return [ - {"value": "reviewable", "label": "Reviewable"}, - {"value": "farm_rendering", "label": "Farm rendering"}, - {"value": "use_range_limit", "label": "Use range limit"}, - { - "value": "render_on_farm", - "label": "Render On Farm" - } - ] - - -class PrenodeModel(BaseSettingsModel): - name: str = SettingsField( - title="Node name" - ) - - nodeclass: str = SettingsField( - "", - title="Node class" - ) - dependent: str = SettingsField( - "", - title="Incoming dependency" - ) - - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreateWriteRenderModel(BaseSettingsModel): - temp_rendering_path_template: str = SettingsField( - title="Temporary rendering path template" - ) - default_variants: list[str] = SettingsField( - title="Default variants", - default_factory=list - ) - instance_attributes: list[str] = SettingsField( - default_factory=list, - enum_resolver=instance_attributes_enum, - title="Instance attributes" - ) - exposed_knobs: list[str] = SettingsField( - title="Write Node Exposed Knobs", - default_factory=list - ) - prenodes: list[PrenodeModel] = SettingsField( - default_factory=list, - title="Preceding nodes", - ) - - @validator("prenodes") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreateWritePrerenderModel(BaseSettingsModel): - temp_rendering_path_template: str = SettingsField( - title="Temporary rendering path template" - ) - default_variants: list[str] = SettingsField( - title="Default variants", - default_factory=list - ) - instance_attributes: list[str] = SettingsField( - default_factory=list, - enum_resolver=instance_attributes_enum, - title="Instance attributes" - ) - exposed_knobs: list[str] = SettingsField( - title="Write Node Exposed Knobs", - default_factory=list - ) - prenodes: list[PrenodeModel] = SettingsField( - 
default_factory=list, - title="Preceding nodes", - ) - - @validator("prenodes") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreateWriteImageModel(BaseSettingsModel): - temp_rendering_path_template: str = SettingsField( - title="Temporary rendering path template" - ) - default_variants: list[str] = SettingsField( - title="Default variants", - default_factory=list - ) - instance_attributes: list[str] = SettingsField( - default_factory=list, - enum_resolver=instance_attributes_enum, - title="Instance attributes" - ) - exposed_knobs: list[str] = SettingsField( - title="Write Node Exposed Knobs", - default_factory=list - ) - prenodes: list[PrenodeModel] = SettingsField( - default_factory=list, - title="Preceding nodes", - ) - - @validator("prenodes") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreatorPluginsSettings(BaseSettingsModel): - CreateWriteRender: CreateWriteRenderModel = SettingsField( - default_factory=CreateWriteRenderModel, - title="Create Write Render" - ) - CreateWritePrerender: CreateWritePrerenderModel = SettingsField( - default_factory=CreateWritePrerenderModel, - title="Create Write Prerender" - ) - CreateWriteImage: CreateWriteImageModel = SettingsField( - default_factory=CreateWriteImageModel, - title="Create Write Image" - ) - - -DEFAULT_CREATE_SETTINGS = { - "CreateWriteRender": { - "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{frame}.{ext}", - "default_variants": [ - "Main", - "Mask" - ], - "instance_attributes": [ - "reviewable", - "farm_rendering" - ], - "exposed_knobs": [], - "prenodes": [ - { - "name": "Reformat01", - "nodeclass": "Reformat", - "dependent": "", - "knobs": [ - { - "type": "text", - "name": "resize", - "text": "none" - }, - { - "type": "boolean", - "name": "black_outside", - "boolean": True - } - ] - } - ] - }, - "CreateWritePrerender": { - "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{frame}.{ext}", - "default_variants": [ - "Key01", - "Bg01", - "Fg01", - "Branch01", - "Part01" - ], - "instance_attributes": [ - "farm_rendering", - "use_range_limit" - ], - "exposed_knobs": [], - "prenodes": [] - }, - "CreateWriteImage": { - "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{ext}", - "default_variants": [ - "StillFrame", - "MPFrame", - "LayoutFrame" - ], - "instance_attributes": [ - "use_range_limit" - ], - "exposed_knobs": [], - "prenodes": [ - { - "name": "FrameHold01", - "nodeclass": "FrameHold", - "dependent": "", - "knobs": [ - { - "type": "expression", - "name": "first_frame", - "expression": "parent.first" - } - ] - } - ] - } -} diff --git a/server_addon/nuke/server/settings/dirmap.py b/server_addon/nuke/server/settings/dirmap.py deleted file mode 100644 index 3e1bac0739..0000000000 --- a/server_addon/nuke/server/settings/dirmap.py +++ /dev/null @@ -1,33 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class DirmapPathsSubmodel(BaseSettingsModel): - _layout = "compact" - source_path: list[str] = SettingsField( - default_factory=list, - title="Source Paths" - ) - destination_path: list[str] = SettingsField( - default_factory=list, - title="Destination Paths" - ) - - -class DirmapSettings(BaseSettingsModel): - """Nuke color management project settings.""" - _isGroup: bool = True - - 
enabled: bool = SettingsField(title="enabled") - paths: DirmapPathsSubmodel = SettingsField( - default_factory=DirmapPathsSubmodel, - title="Dirmap Paths" - ) - - -DEFAULT_DIRMAP_SETTINGS = { - "enabled": False, - "paths": { - "source_path": [], - "destination_path": [] - } -} diff --git a/server_addon/nuke/server/settings/general.py b/server_addon/nuke/server/settings/general.py deleted file mode 100644 index d54c725dc1..0000000000 --- a/server_addon/nuke/server/settings/general.py +++ /dev/null @@ -1,41 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class MenuShortcut(BaseSettingsModel): - """Nuke general project settings.""" - - create: str = SettingsField( - title="Create..." - ) - publish: str = SettingsField( - title="Publish..." - ) - load: str = SettingsField( - title="Load..." - ) - manage: str = SettingsField( - title="Manage..." - ) - build_workfile: str = SettingsField( - title="Build Workfile..." - ) - - -class GeneralSettings(BaseSettingsModel): - """Nuke general project settings.""" - - menu: MenuShortcut = SettingsField( - default_factory=MenuShortcut, - title="Menu Shortcuts", - ) - - -DEFAULT_GENERAL_SETTINGS = { - "menu": { - "create": "ctrl+alt+c", - "publish": "ctrl+alt+p", - "load": "ctrl+alt+l", - "manage": "ctrl+alt+m", - "build_workfile": "ctrl+alt+b" - } -} diff --git a/server_addon/nuke/server/settings/gizmo.py b/server_addon/nuke/server/settings/gizmo.py deleted file mode 100644 index ddb56f891c..0000000000 --- a/server_addon/nuke/server/settings/gizmo.py +++ /dev/null @@ -1,79 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - MultiplatformPathModel, - MultiplatformPathListModel, -) - - -class SubGizmoItem(BaseSettingsModel): - title: str = SettingsField( - title="Label" - ) - sourcetype: str = SettingsField( - title="Type of usage" - ) - command: str = SettingsField( - title="Python command" - ) - icon: str = SettingsField( - title="Icon Path" - ) - shortcut: str = SettingsField( - title="Hotkey" - ) - - -class GizmoDefinitionItem(BaseSettingsModel): - gizmo_toolbar_path: str = SettingsField( - title="Gizmo Menu" - ) - sub_gizmo_list: list[SubGizmoItem] = SettingsField( - default_factory=list, title="Sub Gizmo List") - - -class GizmoItem(BaseSettingsModel): - """Nuke gizmo item """ - - toolbar_menu_name: str = SettingsField( - title="Toolbar Menu Name" - ) - gizmo_source_dir: MultiplatformPathListModel = SettingsField( - default_factory=MultiplatformPathListModel, - title="Gizmo Directory Path" - ) - toolbar_icon_path: MultiplatformPathModel = SettingsField( - default_factory=MultiplatformPathModel, - title="Toolbar Icon Path" - ) - gizmo_definition: list[GizmoDefinitionItem] = SettingsField( - default_factory=list, title="Gizmo Definition") - - -DEFAULT_GIZMO_ITEM = { - "toolbar_menu_name": "OpenPype Gizmo", - "gizmo_source_dir": { - "windows": [], - "darwin": [], - "linux": [] - }, - "toolbar_icon_path": { - "windows": "", - "darwin": "", - "linux": "" - }, - "gizmo_definition": [ - { - "gizmo_toolbar_path": "/path/to/menu", - "sub_gizmo_list": [ - { - "sourcetype": "python", - "title": "Gizmo Note", - "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')", - "icon": "", - "shortcut": "" - } - ] - } - ] -} diff --git a/server_addon/nuke/server/settings/imageio.py b/server_addon/nuke/server/settings/imageio.py deleted file mode 100644 index a34cb4ab05..0000000000 --- a/server_addon/nuke/server/settings/imageio.py +++ /dev/null @@ 
-1,354 +0,0 @@ -from typing import Literal -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, -) - -from .common import ( - KnobModel, - ColorspaceConfigurationModel, -) - - -class NodesModel(BaseSettingsModel): - _layout = "expanded" - plugins: list[str] = SettingsField( - default_factory=list, - title="Used in plugins" - ) - nuke_node_class: str = SettingsField( - title="Nuke Node Class", - ) - - -class RequiredNodesModel(NodesModel): - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class OverrideNodesModel(NodesModel): - subsets: list[str] = SettingsField( - default_factory=list, - title="Subsets" - ) - - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class NodesSetting(BaseSettingsModel): - _isGroup: bool = True - - required_nodes: list[RequiredNodesModel] = SettingsField( - title="Plugin required", - default_factory=list - ) - override_nodes: list[OverrideNodesModel] = SettingsField( - title="Plugin's node overrides", - default_factory=list - ) - - -def ocio_configs_switcher_enum(): - return [ - {"value": "nuke-default", "label": "nuke-default"}, - {"value": "spi-vfx", "label": "spi-vfx (11)"}, - {"value": "spi-anim", "label": "spi-anim (11)"}, - {"value": "aces_0.1.1", "label": "aces_0.1.1 (11)"}, - {"value": "aces_0.7.1", "label": "aces_0.7.1 (11)"}, - {"value": "aces_1.0.1", "label": "aces_1.0.1 (11)"}, - {"value": "aces_1.0.3", "label": "aces_1.0.3 (11, 12)"}, - {"value": "aces_1.1", "label": "aces_1.1 (12, 13)"}, - {"value": "aces_1.2", "label": "aces_1.2 (13, 14)"}, - {"value": "studio-config-v1.0.0_aces-v1.3_ocio-v2.1", - "label": "studio-config-v1.0.0_aces-v1.3_ocio-v2.1 (14)"}, - {"value": "cg-config-v1.0.0_aces-v1.3_ocio-v2.1", - "label": "cg-config-v1.0.0_aces-v1.3_ocio-v2.1 (14)"}, - ] - - -class WorkfileColorspaceSettings(BaseSettingsModel): - """Nuke workfile colorspace preset. 
""" - - _isGroup: bool = True - - color_management: Literal["Nuke", "OCIO"] = SettingsField( - title="Color Management Workflow" - ) - - native_ocio_config: str = SettingsField( - title="Native OpenColorIO Config", - description="Switch between native OCIO configs", - enum_resolver=ocio_configs_switcher_enum, - conditionalEnum=True - ) - - working_space: str = SettingsField( - title="Working Space" - ) - monitor_lut: str = SettingsField( - title="Thumbnails" - ) - monitor_out_lut: str = SettingsField( - title="Monitor Out" - ) - int_8_lut: str = SettingsField( - title="8-bit Files" - ) - int_16_lut: str = SettingsField( - title="16-bit Files" - ) - log_lut: str = SettingsField( - title="Log Files" - ) - float_lut: str = SettingsField( - title="Float Files" - ) - - -class ReadColorspaceRulesItems(BaseSettingsModel): - _layout = "expanded" - - regex: str = SettingsField("", title="Regex expression") - colorspace: str = SettingsField("", title="Colorspace") - - -class RegexInputsModel(BaseSettingsModel): - _isGroup: bool = True - - inputs: list[ReadColorspaceRulesItems] = SettingsField( - default_factory=list, - title="Inputs" - ) - - -class ViewProcessModel(BaseSettingsModel): - _isGroup: bool = True - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class MonitorProcessModel(BaseSettingsModel): - _isGroup: bool = True - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class ImageIOConfigModel(BaseSettingsModel): - """[DEPRECATED] Addon OCIO config settings. Please set the OCIO config - path in the Core addon profiles here - (ayon+settings://core/imageio/ocio_config_profiles). - """ - - override_global_config: bool = SettingsField( - False, - title="Override global OCIO config", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - filepath: list[str] = SettingsField( - default_factory=list, - title="Config path", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - - -class ImageIOFileRuleModel(BaseSettingsModel): - name: str = SettingsField("", title="Rule name") - pattern: str = SettingsField("", title="Regex pattern") - colorspace: str = SettingsField("", title="Colorspace name") - ext: str = SettingsField("", title="File extension") - - -class ImageIOFileRulesModel(BaseSettingsModel): - _isGroup: bool = True - - activate_host_rules: bool = SettingsField(False) - rules: list[ImageIOFileRuleModel] = SettingsField( - default_factory=list, - title="Rules" - ) - - @validator("rules") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value - - -class ImageIOSettings(BaseSettingsModel): - """Nuke color management project settings. 
""" - - activate_host_color_management: bool = SettingsField( - True, title="Enable Color Management") - ocio_config: ImageIOConfigModel = SettingsField( - default_factory=ImageIOConfigModel, - title="OCIO config" - ) - file_rules: ImageIOFileRulesModel = SettingsField( - default_factory=ImageIOFileRulesModel, - title="File Rules" - ) - viewer: ViewProcessModel = SettingsField( - default_factory=ViewProcessModel, - title="Viewer", - description="""Viewer profile is used during - Creation of new viewer node at knob viewerProcess""" - ) - monitor: MonitorProcessModel = SettingsField( - default_factory=MonitorProcessModel, - title="Monitor OUT" - ) - baking_target: ColorspaceConfigurationModel = SettingsField( - default_factory=ColorspaceConfigurationModel, - title="Baking Target Colorspace" - ) - - workfile: WorkfileColorspaceSettings = SettingsField( - default_factory=WorkfileColorspaceSettings, - title="Workfile" - ) - - nodes: NodesSetting = SettingsField( - default_factory=NodesSetting, - title="Nodes" - ) - """# TODO: enhance settings with host api: - - [ ] no need for `inputs` middle part. It can stay - directly on `regex_inputs` - """ - regex_inputs: RegexInputsModel = SettingsField( - default_factory=RegexInputsModel, - title="Assign colorspace to read nodes via rules" - ) - - -DEFAULT_IMAGEIO_SETTINGS = { - "viewer": {"display": "ACES", "view": "sRGB"}, - "monitor": {"display": "ACES", "view": "Rec.709"}, - "baking_target": { - "enabled": True, - "type": "colorspace", - "colorspace": "Output - Rec.709", - }, - "workfile": { - "color_management": "OCIO", - "native_ocio_config": "aces_1.2", - "working_space": "role_scene_linear", - "monitor_lut": "ACES/sRGB", - "monitor_out_lut": "ACES/sRGB", - "int_8_lut": "role_matte_paint", - "int_16_lut": "role_texture_paint", - "log_lut": "role_compositing_log", - "float_lut": "role_scene_linear", - }, - "nodes": { - "required_nodes": [ - { - "plugins": ["CreateWriteRender"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "exr"}, - {"type": "text", "name": "datatype", "text": "16 bit half"}, - {"type": "text", "name": "compression", "text": "Zip (1 scanline)"}, - {"type": "boolean", "name": "autocrop", "boolean": True}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [186, 35, 35], - }, - {"type": "text", "name": "channels", "text": "rgb"}, - {"type": "text", "name": "colorspace", "text": "scene_linear"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - { - "plugins": ["CreateWritePrerender"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "exr"}, - {"type": "text", "name": "datatype", "text": "16 bit half"}, - {"type": "text", "name": "compression", "text": "Zip (1 scanline)"}, - {"type": "boolean", "name": "autocrop", "boolean": True}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [171, 171, 10], - }, - {"type": "text", "name": "channels", "text": "rgb"}, - {"type": "text", "name": "colorspace", "text": "scene_linear"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - { - "plugins": ["CreateWriteImage"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "tiff"}, - {"type": "text", "name": "datatype", "text": "16 bit"}, - {"type": "text", "name": "compression", "text": "Deflate"}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [56, 162, 7], - }, - {"type": "text", "name": "channels", "text": 
"rgb"}, - {"type": "text", "name": "colorspace", "text": "texture_paint"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - ], - "override_nodes": [], - }, - "regex_inputs": { - "inputs": [{"regex": "(beauty).*(?=.exr)", "colorspace": "linear"}] - }, -} diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py deleted file mode 100644 index 22cb469e8d..0000000000 --- a/server_addon/nuke/server/settings/loader_plugins.py +++ /dev/null @@ -1,74 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class LoadImageModel(BaseSettingsModel): - enabled: bool = SettingsField( - title="Enabled" - ) - representations_include: list[str] = SettingsField( - default_factory=list, - title="Include representations" - ) - - node_name_template: str = SettingsField( - title="Read node name template" - ) - - -class LoadClipOptionsModel(BaseSettingsModel): - start_at_workfile: bool = SettingsField( - title="Start at workfile's start frame" - ) - add_retime: bool = SettingsField( - title="Add retime" - ) - deep_exr: bool = SettingsField( - title="Deep Exr Read Node" - ) - -class LoadClipModel(BaseSettingsModel): - enabled: bool = SettingsField( - title="Enabled" - ) - representations_include: list[str] = SettingsField( - default_factory=list, - title="Include representations" - ) - - node_name_template: str = SettingsField( - title="Read node name template" - ) - options_defaults: LoadClipOptionsModel = SettingsField( - default_factory=LoadClipOptionsModel, - title="Loader option defaults" - ) - - -class LoaderPluginsModel(BaseSettingsModel): - LoadImage: LoadImageModel = SettingsField( - default_factory=LoadImageModel, - title="Load Image" - ) - LoadClip: LoadClipModel = SettingsField( - default_factory=LoadClipModel, - title="Load Clip" - ) - - -DEFAULT_LOADER_PLUGINS_SETTINGS = { - "LoadImage": { - "enabled": True, - "representations_include": [], - "node_name_template": "{class_name}_{ext}" - }, - "LoadClip": { - "enabled": True, - "representations_include": [], - "node_name_template": "{class_name}_{ext}", - "options_defaults": { - "start_at_workfile": True, - "add_retime": True, - "deep_exr": False - } - } -} diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py deleted file mode 100644 index 1fd347cc21..0000000000 --- a/server_addon/nuke/server/settings/main.py +++ /dev/null @@ -1,112 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, -) - -from .general import ( - GeneralSettings, - DEFAULT_GENERAL_SETTINGS -) -from .imageio import ( - ImageIOSettings, - DEFAULT_IMAGEIO_SETTINGS -) -from .dirmap import ( - DirmapSettings, - DEFAULT_DIRMAP_SETTINGS -) -from .scriptsmenu import ( - ScriptsmenuSettings, - DEFAULT_SCRIPTSMENU_SETTINGS -) -from .gizmo import ( - GizmoItem, - DEFAULT_GIZMO_ITEM -) -from .create_plugins import ( - CreatorPluginsSettings, - DEFAULT_CREATE_SETTINGS -) -from .publish_plugins import ( - PublishPluginsModel, - DEFAULT_PUBLISH_PLUGIN_SETTINGS -) -from .loader_plugins import ( - LoaderPluginsModel, - DEFAULT_LOADER_PLUGINS_SETTINGS -) -from .workfile_builder import ( - WorkfileBuilderModel, - DEFAULT_WORKFILE_BUILDER_SETTINGS -) -from .templated_workfile_build import ( - TemplatedWorkfileBuildModel -) - - -class NukeSettings(BaseSettingsModel): - """Nuke addon settings.""" - - general: GeneralSettings = SettingsField( - default_factory=GeneralSettings, - title="General", - ) - - imageio: 
ImageIOSettings = SettingsField( - default_factory=ImageIOSettings, - title="Color Management (imageio)", - ) - - dirmap: DirmapSettings = SettingsField( - default_factory=DirmapSettings, - title="Nuke Directory Mapping", - ) - - scriptsmenu: ScriptsmenuSettings = SettingsField( - default_factory=ScriptsmenuSettings, - title="Scripts Menu Definition", - ) - - gizmo: list[GizmoItem] = SettingsField( - default_factory=list, title="Gizmo Menu") - - create: CreatorPluginsSettings = SettingsField( - default_factory=CreatorPluginsSettings, - title="Creator Plugins", - ) - - publish: PublishPluginsModel = SettingsField( - default_factory=PublishPluginsModel, - title="Publish Plugins", - ) - - load: LoaderPluginsModel = SettingsField( - default_factory=LoaderPluginsModel, - title="Loader Plugins", - ) - - workfile_builder: WorkfileBuilderModel = SettingsField( - default_factory=WorkfileBuilderModel, - title="Workfile Builder", - ) - - templated_workfile_build: TemplatedWorkfileBuildModel = SettingsField( - title="Templated Workfile Build", - default_factory=TemplatedWorkfileBuildModel - ) - - -DEFAULT_VALUES = { - "general": DEFAULT_GENERAL_SETTINGS, - "imageio": DEFAULT_IMAGEIO_SETTINGS, - "dirmap": DEFAULT_DIRMAP_SETTINGS, - "scriptsmenu": DEFAULT_SCRIPTSMENU_SETTINGS, - "gizmo": [DEFAULT_GIZMO_ITEM], - "create": DEFAULT_CREATE_SETTINGS, - "publish": DEFAULT_PUBLISH_PLUGIN_SETTINGS, - "load": DEFAULT_LOADER_PLUGINS_SETTINGS, - "workfile_builder": DEFAULT_WORKFILE_BUILDER_SETTINGS, - "templated_workfile_build": { - "profiles": [] - } -} diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py deleted file mode 100644 index c52c9e9c84..0000000000 --- a/server_addon/nuke/server/settings/publish_plugins.py +++ /dev/null @@ -1,412 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, - task_types_enum -) -from .common import ( - KnobModel, - ColorspaceConfigurationModel, - validate_json_dict, -) - - -def nuke_render_publish_types_enum(): - """Return all nuke render families available in creators.""" - return [ - {"value": "render", "label": "Render"}, - {"value": "prerender", "label": "Prerender"}, - {"value": "image", "label": "Image"} - ] - - -def nuke_product_types_enum(): - """Return all nuke families available in creators.""" - return [ - {"value": "nukenodes", "label": "Nukenodes"}, - {"value": "model", "label": "Model"}, - {"value": "camera", "label": "Camera"}, - {"value": "gizmo", "label": "Gizmo"}, - {"value": "source", "label": "Source"} - ] + nuke_render_publish_types_enum() - - -class NodeModel(BaseSettingsModel): - name: str = SettingsField( - title="Node name" - ) - nodeclass: str = SettingsField( - "", - title="Node class" - ) - dependent: str = SettingsField( - "", - title="Incoming dependency" - ) - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CollectInstanceDataModel(BaseSettingsModel): - sync_workfile_version_on_product_types: list[str] = SettingsField( - default_factory=list, - enum_resolver=nuke_product_types_enum, - title="Product types" - ) - - -class OptionalPluginModel(BaseSettingsModel): - enabled: bool = SettingsField(True) - optional: bool = SettingsField(title="Optional") - active: bool = 
SettingsField(title="Active") - - -class ValidateKnobsModel(BaseSettingsModel): - enabled: bool = SettingsField(title="Enabled") - knobs: str = SettingsField( - "{}", - title="Knobs", - widget="textarea", - ) - - @validator("knobs") - def validate_json(cls, value): - return validate_json_dict(value) - - -class ExtractReviewDataModel(BaseSettingsModel): - enabled: bool = SettingsField(title="Enabled") - - -class ExtractReviewDataLutModel(BaseSettingsModel): - enabled: bool = SettingsField(title="Enabled") - - -class BakingStreamFilterModel(BaseSettingsModel): - task_types: list[str] = SettingsField( - default_factory=list, - title="Task types", - enum_resolver=task_types_enum - ) - product_types: list[str] = SettingsField( - default_factory=list, - enum_resolver=nuke_render_publish_types_enum, - title="Sync workfile versions for familes" - ) - product_names: list[str] = SettingsField( - default_factory=list, title="Product names") - - -class ReformatNodesRepositionNodes(BaseSettingsModel): - node_class: str = SettingsField(title="Node class") - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Node knobs") - - -class ReformatNodesConfigModel(BaseSettingsModel): - """Only reposition nodes supported. - - You can add multiple reformat nodes and set their knobs. - Order of reformat nodes is important. First reformat node will - be applied first and last reformat node will be applied last. - """ - enabled: bool = SettingsField(False) - reposition_nodes: list[ReformatNodesRepositionNodes] = SettingsField( - default_factory=list, - title="Reposition knobs" - ) - - -class IntermediateOutputModel(BaseSettingsModel): - name: str = SettingsField(title="Output name") - publish: bool = SettingsField(title="Publish") - filter: BakingStreamFilterModel = SettingsField( - title="Filter", default_factory=BakingStreamFilterModel) - read_raw: bool = SettingsField( - False, - title="Input read node RAW switch" - ) - bake_viewer_process: bool = SettingsField( - True, - title="Bake viewer process", - section="Baking target", - ) - colorspace_override: ColorspaceConfigurationModel = SettingsField( - title="Target baking colorspace override", - description="Override Baking target with colorspace or display/view", - default_factory=ColorspaceConfigurationModel - ) - bake_viewer_input_process: bool = SettingsField( - True, - title="Bake viewer input process node (LUT)", - section="Baking additional", - ) - reformat_nodes_config: ReformatNodesConfigModel = SettingsField( - default_factory=ReformatNodesConfigModel, - title="Reformat Nodes") - extension: str = SettingsField( - "mov", - title="File extension" - ) - add_custom_tags: list[str] = SettingsField( - title="Custom tags", default_factory=list) - - -class ExtractReviewIntermediatesModel(BaseSettingsModel): - enabled: bool = SettingsField(title="Enabled") - viewer_lut_raw: bool = SettingsField(title="Viewer lut raw") - outputs: list[IntermediateOutputModel] = SettingsField( - default_factory=list, - title="Baking streams" - ) - - -class FSubmissionNoteModel(BaseSettingsModel): - enabled: bool = SettingsField(title="enabled") - template: str = SettingsField(title="Template") - - -class FSubmistingForModel(BaseSettingsModel): - enabled: bool = SettingsField(title="enabled") - template: str = SettingsField(title="Template") - - -class FVFXScopeOfWorkModel(BaseSettingsModel): - enabled: bool = SettingsField(title="enabled") - template: str = SettingsField(title="Template") - - -class ExctractSlateFrameParamModel(BaseSettingsModel): - 
f_submission_note: FSubmissionNoteModel = SettingsField( - title="f_submission_note", - default_factory=FSubmissionNoteModel - ) - f_submitting_for: FSubmistingForModel = SettingsField( - title="f_submitting_for", - default_factory=FSubmistingForModel - ) - f_vfx_scope_of_work: FVFXScopeOfWorkModel = SettingsField( - title="f_vfx_scope_of_work", - default_factory=FVFXScopeOfWorkModel - ) - - -class ExtractSlateFrameModel(BaseSettingsModel): - viewer_lut_raw: bool = SettingsField(title="Viewer lut raw") - key_value_mapping: ExctractSlateFrameParamModel = SettingsField( - title="Key value mapping", - default_factory=ExctractSlateFrameParamModel - ) - - -class IncrementScriptVersionModel(BaseSettingsModel): - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - -class PublishPluginsModel(BaseSettingsModel): - CollectInstanceData: CollectInstanceDataModel = SettingsField( - title="Collect Instance Version", - default_factory=CollectInstanceDataModel, - section="Collectors" - ) - ValidateCorrectAssetContext: OptionalPluginModel = SettingsField( - title="Validate Correct Folder Name", - default_factory=OptionalPluginModel, - section="Validators" - ) - ValidateKnobs: ValidateKnobsModel = SettingsField( - title="Validate Knobs", - default_factory=ValidateKnobsModel - ) - ValidateOutputResolution: OptionalPluginModel = SettingsField( - title="Validate Output Resolution", - default_factory=OptionalPluginModel - ) - ValidateGizmo: OptionalPluginModel = SettingsField( - title="Validate Gizmo", - default_factory=OptionalPluginModel - ) - ValidateBackdrop: OptionalPluginModel = SettingsField( - title="Validate Backdrop", - default_factory=OptionalPluginModel - ) - ValidateScriptAttributes: OptionalPluginModel = SettingsField( - title="Validate workfile attributes", - default_factory=OptionalPluginModel - ) - ExtractReviewData: ExtractReviewDataModel = SettingsField( - title="Extract Review Data", - default_factory=ExtractReviewDataModel - ) - ExtractReviewDataLut: ExtractReviewDataLutModel = SettingsField( - title="Extract Review Data Lut", - default_factory=ExtractReviewDataLutModel - ) - ExtractReviewIntermediates: ExtractReviewIntermediatesModel = ( - SettingsField( - title="Extract Review Intermediates", - default_factory=ExtractReviewIntermediatesModel - ) - ) - ExtractSlateFrame: ExtractSlateFrameModel = SettingsField( - title="Extract Slate Frame", - default_factory=ExtractSlateFrameModel - ) - IncrementScriptVersion: IncrementScriptVersionModel = SettingsField( - title="Increment Workfile Version", - default_factory=IncrementScriptVersionModel, - section="Integrators" - ) - - -DEFAULT_PUBLISH_PLUGIN_SETTINGS = { - "CollectInstanceData": { - "sync_workfile_version_on_product_types": [ - "nukenodes", - "camera", - "gizmo", - "source", - "render", - "write" - ] - }, - "ValidateCorrectAssetContext": { - "enabled": True, - "optional": True, - "active": True - }, - "ValidateKnobs": { - "enabled": False, - "knobs": "\n".join([ - '{', - ' "render": {', - ' "review": true', - ' }', - '}' - ]) - }, - "ValidateOutputResolution": { - "enabled": True, - "optional": True, - "active": True - }, - "ValidateGizmo": { - "enabled": True, - "optional": True, - "active": True - }, - "ValidateBackdrop": { - "enabled": True, - "optional": True, - "active": True - }, - "ValidateScriptAttributes": { - "enabled": True, - "optional": True, - "active": True - }, - "ExtractReviewData": { - "enabled": False - }, - 
"ExtractReviewDataLut": { - "enabled": False - }, - "ExtractReviewIntermediates": { - "enabled": True, - "viewer_lut_raw": False, - "outputs": [ - { - "name": "baking", - "publish": False, - "filter": { - "task_types": [], - "product_types": [], - "product_names": [] - }, - "read_raw": False, - "colorspace_override": { - "enabled": False, - "type": "colorspace", - "colorspace": "", - "display_view": { - "display": "", - "view": "" - } - }, - "bake_viewer_process": True, - "bake_viewer_input_process": True, - "reformat_nodes_config": { - "enabled": False, - "reposition_nodes": [ - { - "node_class": "Reformat", - "knobs": [ - { - "type": "text", - "name": "type", - "text": "to format" - }, - { - "type": "text", - "name": "format", - "text": "HD_1080" - }, - { - "type": "text", - "name": "filter", - "text": "Lanczos6" - }, - { - "type": "boolean", - "name": "black_outside", - "boolean": True - }, - { - "type": "boolean", - "name": "pbb", - "boolean": False - } - ] - } - ] - }, - "extension": "mov", - "add_custom_tags": [] - } - ] - }, - "ExtractSlateFrame": { - "viewer_lut_raw": False, - "key_value_mapping": { - "f_submission_note": { - "enabled": True, - "template": "{comment}" - }, - "f_submitting_for": { - "enabled": True, - "template": "{intent[value]}" - }, - "f_vfx_scope_of_work": { - "enabled": False, - "template": "" - } - } - }, - "IncrementScriptVersion": { - "enabled": True, - "optional": True, - "active": True - } -} diff --git a/server_addon/nuke/server/settings/scriptsmenu.py b/server_addon/nuke/server/settings/scriptsmenu.py deleted file mode 100644 index 7ffd6841d5..0000000000 --- a/server_addon/nuke/server/settings/scriptsmenu.py +++ /dev/null @@ -1,52 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class ScriptsmenuSubmodel(BaseSettingsModel): - """Item Definition""" - _isGroup = True - - type: str = SettingsField(title="Type") - command: str = SettingsField(title="Command") - sourcetype: str = SettingsField(title="Source Type") - title: str = SettingsField(title="Title") - tooltip: str = SettingsField(title="Tooltip") - - -class ScriptsmenuSettings(BaseSettingsModel): - """Nuke script menu project settings.""" - _isGroup = True - - name: str = SettingsField(title="Menu Name") - definition: list[ScriptsmenuSubmodel] = SettingsField( - default_factory=list, - title="Definition", - description="Scriptmenu Items Definition" - ) - - -DEFAULT_SCRIPTSMENU_SETTINGS = { - "name": "Custom Tools", - "definition": [ - { - "type": "action", - "sourcetype": "python", - "title": "Ayon Nuke Docs", - "command": "import webbrowser;webbrowser.open(url='https://ayon.ynput.io/docs/addon_nuke_artist')", # noqa - "tooltip": "Open the Ayon Nuke user doc page" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set Frame Start (Read Node)", - "command": "from openpype.hosts.nuke.startup.frame_setting_for_read_nodes import main;main();", # noqa - "tooltip": "Set frame start for read node(s)" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set non publish output for Write Node", - "command": "from openpype.hosts.nuke.startup.custom_write_node import main;main();", # noqa - "tooltip": "Open the OpenPype Nuke user doc page" - } - ] -} diff --git a/server_addon/nuke/server/settings/templated_workfile_build.py b/server_addon/nuke/server/settings/templated_workfile_build.py deleted file mode 100644 index 12ebedf570..0000000000 --- a/server_addon/nuke/server/settings/templated_workfile_build.py +++ /dev/null @@ -1,34 +0,0 @@ -from 
ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - task_types_enum, -) - - -class TemplatedWorkfileProfileModel(BaseSettingsModel): - task_types: list[str] = SettingsField( - default_factory=list, - title="Task types", - enum_resolver=task_types_enum - ) - task_names: list[str] = SettingsField( - default_factory=list, - title="Task names" - ) - path: str = SettingsField( - title="Path to template" - ) - keep_placeholder: bool = SettingsField( - False, - title="Keep placeholders") - create_first_version: bool = SettingsField( - True, - title="Create first version" - ) - - -class TemplatedWorkfileBuildModel(BaseSettingsModel): - """Settings for templated workfile builder.""" - profiles: list[TemplatedWorkfileProfileModel] = SettingsField( - default_factory=list - ) diff --git a/server_addon/nuke/server/settings/workfile_builder.py b/server_addon/nuke/server/settings/workfile_builder.py deleted file mode 100644 index 97961655f3..0000000000 --- a/server_addon/nuke/server/settings/workfile_builder.py +++ /dev/null @@ -1,84 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - task_types_enum, - MultiplatformPathModel, -) - - -class CustomTemplateModel(BaseSettingsModel): - task_types: list[str] = SettingsField( - default_factory=list, - title="Task types", - enum_resolver=task_types_enum - ) - path: MultiplatformPathModel = SettingsField( - default_factory=MultiplatformPathModel, - title="Gizmo Directory Path" - ) - - -class BuilderProfileItemModel(BaseSettingsModel): - product_name_filters: list[str] = SettingsField( - default_factory=list, - title="Product name" - ) - product_types: list[str] = SettingsField( - default_factory=list, - title="Product types" - ) - repre_names: list[str] = SettingsField( - default_factory=list, - title="Representations" - ) - loaders: list[str] = SettingsField( - default_factory=list, - title="Loader plugins" - ) - - -class BuilderProfileModel(BaseSettingsModel): - task_types: list[str] = SettingsField( - default_factory=list, - title="Task types", - enum_resolver=task_types_enum - ) - tasks: list[str] = SettingsField( - default_factory=list, - title="Task names" - ) - current_context: list[BuilderProfileItemModel] = SettingsField( - default_factory=list, - title="Current context" - ) - linked_assets: list[BuilderProfileItemModel] = SettingsField( - default_factory=list, - title="Linked assets/shots" - ) - - -class WorkfileBuilderModel(BaseSettingsModel): - """[deprecated] use Template Workfile Build Settings instead. 
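For reference, a sketch of a single templated-workfile-build profile as it could be filled in the settings defined above; the task type and the template path are placeholders, not shipped defaults:

    {
        "task_types": ["Compositing"],                        # placeholder task type
        "task_names": [],                                     # no task-name filter in this placeholder
        "path": "/studio/templates/nuke/comp_template.nk",    # placeholder template path
        "keep_placeholder": False,
        "create_first_version": True
    }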
- """ - create_first_version: bool = SettingsField( - title="Create first workfile") - custom_templates: list[CustomTemplateModel] = SettingsField( - default_factory=list, - title="Custom templates" - ) - builder_on_start: bool = SettingsField( - default=False, - title="Run Builder at first workfile" - ) - profiles: list[BuilderProfileModel] = SettingsField( - default_factory=list, - title="Builder profiles" - ) - - -DEFAULT_WORKFILE_BUILDER_SETTINGS = { - "create_first_version": False, - "custom_templates": [], - "builder_on_start": False, - "profiles": [] -} diff --git a/server_addon/timers_manager/client/ayon_timers_manager/__init__.py b/server_addon/timers_manager/client/ayon_timers_manager/__init__.py deleted file mode 100644 index 1ec0d9b74b..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .version import __version__ -from .timers_manager import ( - TimersManager -) - -__all__ = ( - "__version__", - - "TimersManager", -) diff --git a/server_addon/timers_manager/client/ayon_timers_manager/exceptions.py b/server_addon/timers_manager/client/ayon_timers_manager/exceptions.py deleted file mode 100644 index 5a9e00765d..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/exceptions.py +++ /dev/null @@ -1,3 +0,0 @@ -class InvalidContextError(ValueError): - """Context for which the timer should be started is invalid.""" - pass diff --git a/server_addon/timers_manager/client/ayon_timers_manager/idle_threads.py b/server_addon/timers_manager/client/ayon_timers_manager/idle_threads.py deleted file mode 100644 index d70f7790c4..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/idle_threads.py +++ /dev/null @@ -1,160 +0,0 @@ -import time -from qtpy import QtCore -from pynput import mouse, keyboard - -from ayon_core.lib import Logger - - -class IdleItem: - """Python object holds information if state of idle changed. - - This item is used to be independent from Qt objects. - """ - def __init__(self): - self.changed = False - - def reset(self): - self.changed = False - - def set_changed(self, changed=True): - self.changed = changed - - -class IdleManager(QtCore.QThread): - """ Measure user's idle time in seconds. - Idle time resets on keyboard/mouse input. - Is able to emit signals at specific time idle. - """ - time_signals = {} - idle_time = 0 - signal_reset_timer = QtCore.Signal() - - def __init__(self): - super(IdleManager, self).__init__() - self.log = Logger.get_logger(self.__class__.__name__) - self.signal_reset_timer.connect(self._reset_time) - - self.idle_item = IdleItem() - - self._is_running = False - self._mouse_thread = None - self._keyboard_thread = None - - def add_time_signal(self, emit_time, signal): - """ If any module want to use IdleManager, need to use add_time_signal - - Args: - emit_time(int): Time when signal will be emitted. - signal(QtCore.Signal): Signal that will be emitted - (without objects). 
- """ - if emit_time not in self.time_signals: - self.time_signals[emit_time] = [] - self.time_signals[emit_time].append(signal) - - @property - def is_running(self): - return self._is_running - - def _reset_time(self): - self.idle_time = 0 - - def stop(self): - self._is_running = False - - def _on_mouse_destroy(self): - self._mouse_thread = None - - def _on_keyboard_destroy(self): - self._keyboard_thread = None - - def run(self): - self.log.info('IdleManager has started') - self._is_running = True - - thread_mouse = MouseThread(self.idle_item) - thread_keyboard = KeyboardThread(self.idle_item) - - thread_mouse.destroyed.connect(self._on_mouse_destroy) - thread_keyboard.destroyed.connect(self._on_keyboard_destroy) - - self._mouse_thread = thread_mouse - self._keyboard_thread = thread_keyboard - - thread_mouse.start() - thread_keyboard.start() - - # Main loop here is each second checked if idle item changed state - while self._is_running: - if self.idle_item.changed: - self.idle_item.reset() - self.signal_reset_timer.emit() - else: - self.idle_time += 1 - - if self.idle_time in self.time_signals: - for signal in self.time_signals[self.idle_time]: - signal.emit() - time.sleep(1) - - self._post_run() - self.log.info('IdleManager has stopped') - - def _post_run(self): - # Stop threads if still exist - if self._mouse_thread is not None: - self._mouse_thread.signal_stop.emit() - self._mouse_thread.terminate() - self._mouse_thread.wait() - - if self._keyboard_thread is not None: - self._keyboard_thread.signal_stop.emit() - self._keyboard_thread.terminate() - self._keyboard_thread.wait() - - -class MouseThread(QtCore.QThread): - """Listens user's mouse movement.""" - signal_stop = QtCore.Signal() - - def __init__(self, idle_item): - super(MouseThread, self).__init__() - self.signal_stop.connect(self.stop) - self.m_listener = None - self.idle_item = idle_item - - def stop(self): - if self.m_listener is not None: - self.m_listener.stop() - - def on_move(self, *args, **kwargs): - self.idle_item.set_changed() - - def run(self): - self.m_listener = mouse.Listener(on_move=self.on_move) - self.m_listener.start() - - -class KeyboardThread(QtCore.QThread): - """Listens user's keyboard input - """ - signal_stop = QtCore.Signal() - - def __init__(self, idle_item): - super(KeyboardThread, self).__init__() - self.signal_stop.connect(self.stop) - self.k_listener = None - self.idle_item = idle_item - - def stop(self): - if self.k_listener is not None: - listener = self.k_listener - self.k_listener = None - listener.stop() - - def on_press(self, *args, **kwargs): - self.idle_item.set_changed() - - def run(self): - self.k_listener = keyboard.Listener(on_press=self.on_press) - self.k_listener.start() diff --git a/server_addon/timers_manager/client/ayon_timers_manager/launch_hooks/post_start_timer.py b/server_addon/timers_manager/client/ayon_timers_manager/launch_hooks/post_start_timer.py deleted file mode 100644 index b402d4034a..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/launch_hooks/post_start_timer.py +++ /dev/null @@ -1,44 +0,0 @@ -from ayon_applications import PostLaunchHook, LaunchTypes - - -class PostStartTimerHook(PostLaunchHook): - """Start timer with TimersManager module. - - This module requires enabled TimerManager module. 
- """ - order = None - launch_types = {LaunchTypes.local} - - def execute(self): - project_name = self.data.get("project_name") - folder_path = self.data.get("folder_path") - task_name = self.data.get("task_name") - - missing_context_keys = set() - if not project_name: - missing_context_keys.add("project_name") - if not folder_path: - missing_context_keys.add("folder_path") - if not task_name: - missing_context_keys.add("task_name") - - if missing_context_keys: - missing_keys_str = ", ".join([ - "\"{}\"".format(key) for key in missing_context_keys - ]) - self.log.debug("Hook {} skipped. Missing data keys: {}".format( - self.__class__.__name__, missing_keys_str - )) - return - - timers_manager = self.addons_manager.get("timers_manager") - if not timers_manager or not timers_manager.enabled: - self.log.info(( - "Skipping starting timer because" - " TimersManager is not available." - )) - return - - timers_manager.start_timer_with_webserver( - project_name, folder_path, task_name, logger=self.log - ) diff --git a/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/start_timer.py b/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/start_timer.py deleted file mode 100644 index 620cdb6e65..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/start_timer.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -Requires: - context -> project_settings - context -> ayonAddonsManager -""" - -import pyblish.api - - -class StartTimer(pyblish.api.ContextPlugin): - label = "Start Timer" - order = pyblish.api.IntegratorOrder + 1 - hosts = ["*"] - - def process(self, context): - timers_manager = context.data["ayonAddonsManager"]["timers_manager"] - if not timers_manager.enabled: - self.log.debug("TimersManager is disabled") - return - - project_settings = context.data["project_settings"] - if not project_settings["timers_manager"]["disregard_publishing"]: - self.log.debug("Publish is not affecting running timers.") - return - - project_name = context.data["projectName"] - folder_path = context.data.get("folderPath") - task_name = context.data.get("task") - if not project_name or not folder_path or not task_name: - self.log.info(( - "Current context does not contain all" - " required information to start a timer." 
- )) - return - timers_manager.start_timer_with_webserver( - project_name, folder_path, task_name, self.log - ) diff --git a/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/stop_timer.py b/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/stop_timer.py deleted file mode 100644 index eafd8cb450..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/plugins/publish/stop_timer.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Requires: - context -> project_settings - context -> ayonAddonsManager -""" - - -import pyblish.api - - -class StopTimer(pyblish.api.ContextPlugin): - label = "Stop Timer" - order = pyblish.api.ExtractorOrder - 0.49 - hosts = ["*"] - - def process(self, context): - timers_manager = context.data["ayonAddonsManager"]["timers_manager"] - if not timers_manager.enabled: - self.log.debug("TimersManager is disabled") - return - - project_settings = context.data["project_settings"] - if not project_settings["timers_manager"]["disregard_publishing"]: - self.log.debug("Publish is not affecting running timers.") - return - - timers_manager.stop_timer_with_webserver(self.log) diff --git a/server_addon/timers_manager/client/ayon_timers_manager/rest_api.py b/server_addon/timers_manager/client/ayon_timers_manager/rest_api.py deleted file mode 100644 index 88a6539510..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/rest_api.py +++ /dev/null @@ -1,85 +0,0 @@ -import json - -from aiohttp.web_response import Response -from ayon_core.lib import Logger - - -class TimersManagerModuleRestApi: - """ - REST API endpoint used for calling from hosts when context change - happens in Workfile app. - """ - def __init__(self, user_module, server_manager): - self._log = None - self.module = user_module - self.server_manager = server_manager - - self.prefix = "/timers_manager" - - self.register() - - @property - def log(self): - if self._log is None: - self._log = Logger.get_logger(self.__class__.__name__) - return self._log - - def register(self): - self.server_manager.add_route( - "POST", - self.prefix + "/start_timer", - self.start_timer - ) - self.server_manager.add_route( - "POST", - self.prefix + "/stop_timer", - self.stop_timer - ) - self.server_manager.add_route( - "GET", - self.prefix + "/get_task_time", - self.get_task_time - ) - - async def start_timer(self, request): - data = await request.json() - try: - project_name = data["project_name"] - folder_path = data["folder_path"] - task_name = data["task_name"] - except KeyError: - msg = ( - "Payload must contain fields 'project_name," - " 'folder_path' and 'task_name'" - ) - self.log.error(msg) - return Response(status=400, message=msg) - - self.module.stop_timers() - try: - self.module.start_timer(project_name, folder_path, task_name) - except Exception as exc: - return Response(status=404, message=str(exc)) - - return Response(status=200) - - async def stop_timer(self, request): - self.module.stop_timers() - return Response(status=200) - - async def get_task_time(self, request): - data = await request.json() - try: - project_name = data["project_name"] - folder_path = data["folder_path"] - task_name = data["task_name"] - except KeyError: - message = ( - "Payload must contain fields 'project_name, 'folder_path'," - " 'task_name'" - ) - self.log.warning(message) - return Response(text=message, status=404) - - time = self.module.get_task_time(project_name, folder_path, task_name) - return Response(text=json.dumps(time)) diff --git 
a/server_addon/timers_manager/client/ayon_timers_manager/timers_manager.py b/server_addon/timers_manager/client/ayon_timers_manager/timers_manager.py deleted file mode 100644 index 2aac7b2a49..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/timers_manager.py +++ /dev/null @@ -1,488 +0,0 @@ -import os -import platform - -import ayon_api - -from ayon_core.addon import ( - AYONAddon, - ITrayService, - IPluginPaths -) -from ayon_core.lib.events import register_event_callback - -from .version import __version__ -from .exceptions import InvalidContextError - -TIMER_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) - - -class ExampleTimersManagerConnector: - """Timers manager can handle timers of multiple modules/addons. - - Module must have object under `timers_manager_connector` attribute with - few methods. This is example class of the object that could be stored under - module. - - Required methods are 'stop_timer' and 'start_timer'. - - Example of `data` that are passed during changing timer: - ``` - data = { - "project_name": project_name, - "folder_id": folder_id, - "folder_path": folder_entity["path"], - "task_name": task_name, - "task_type": task_type, - # Deprecated - "asset_id": folder_id, - "asset_name": folder_entity["name"], - "hierarchy": hierarchy_items, - } - ``` - """ - - # Not needed at all - def __init__(self, module): - # Store timer manager module to be able call it's methods when needed - self._timers_manager_module = None - - # Store module which want to use timers manager to have access - self._module = module - - # Required - def stop_timer(self): - """Called by timers manager when module should stop timer.""" - self._module.stop_timer() - - # Required - def start_timer(self, data): - """Method called by timers manager when should start timer.""" - self._module.start_timer(data) - - # Optional - def register_timers_manager(self, timer_manager_module): - """Method called by timers manager where it's object is passed. - - This is moment when timers manager module can be store to be able - call it's callbacks (e.g. timer started). - """ - self._timers_manager_module = timer_manager_module - - # Custom implementation - def timer_started(self, data): - """This is example of possibility to trigger callbacks on manager.""" - if self._timers_manager_module is not None: - self._timers_manager_module.timer_started(self._module.id, data) - - # Custom implementation - def timer_stopped(self): - if self._timers_manager_module is not None: - self._timers_manager_module.timer_stopped(self._module.id) - - -class TimersManager( - AYONAddon, - ITrayService, - IPluginPaths -): - """ Handles about Timers. - - Should be able to start/stop all timers at once. - - To be able use this advantage module has to have attribute with name - `timers_manager_connector` which has two methods 'stop_timer' - and 'start_timer'. Optionally may have `register_timers_manager` where - object of TimersManager module is passed to be able call it's callbacks. - - See `ExampleTimersManagerConnector`. 
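A bare-bones sketch of what an integrating addon could expose; everything except the `timers_manager_connector` attribute name and the required `start_timer`/`stop_timer` methods is illustrative:

    class MyTrackerConnector:
        # Only 'start_timer' and 'stop_timer' are required by TimersManager.
        def __init__(self, addon):
            self._addon = addon
            self._timers_manager = None

        def start_timer(self, data):
            # 'data' carries project_name, folder_id, folder_path,
            # task_name, task_type (plus the deprecated asset_* keys).
            self._addon.start_tracker_timer(data)   # hypothetical addon method

        def stop_timer(self):
            self._addon.stop_tracker_timer()        # hypothetical addon method

        def register_timers_manager(self, timers_manager):
            # Optional: keep a reference to call its callbacks later.
            self._timers_manager = timers_manager

    # TimersManager discovers the connector through this attribute:
    # my_addon.timers_manager_connector = MyTrackerConnector(my_addon)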
- """ - name = "timers_manager" - version = __version__ - label = "Timers Service" - - _required_methods = ( - "stop_timer", - "start_timer" - ) - - def initialize(self, studio_settings): - timers_settings = studio_settings.get(self.name) - enabled = timers_settings is not None - - auto_stop = False - full_time = 0 - message_time = 0 - if enabled: - # When timer will stop if idle manager is running (minutes) - full_time = int(timers_settings["full_time"] * 60) - # How many minutes before the timer is stopped will popup the message - message_time = int(timers_settings["message_time"] * 60) - - auto_stop = timers_settings["auto_stop"] - platform_name = platform.system().lower() - # Turn of auto stop on MacOs because pynput requires root permissions - # and on linux can cause thread locks on application close - if full_time <= 0 or platform_name in ("darwin", "linux"): - auto_stop = False - - self.enabled = enabled - self.auto_stop = auto_stop - self.time_show_message = full_time - message_time - self.time_stop_timer = full_time - - self.is_running = False - self.last_task = None - - # Tray attributes - self._signal_handler = None - self._widget_user_idle = None - self._idle_manager = None - - self._connectors_by_module_id = {} - self._modules_by_id = {} - - def tray_init(self): - if not self.auto_stop: - return - - from .idle_threads import IdleManager - from .widget_user_idle import WidgetUserIdle, SignalHandler - - signal_handler = SignalHandler(self) - idle_manager = IdleManager() - widget_user_idle = WidgetUserIdle(self) - widget_user_idle.set_countdown_start( - self.time_stop_timer - self.time_show_message - ) - - idle_manager.signal_reset_timer.connect( - widget_user_idle.reset_countdown - ) - idle_manager.add_time_signal( - self.time_show_message, signal_handler.signal_show_message - ) - idle_manager.add_time_signal( - self.time_stop_timer, signal_handler.signal_stop_timers - ) - - self._signal_handler = signal_handler - self._widget_user_idle = widget_user_idle - self._idle_manager = idle_manager - - def tray_start(self, *_a, **_kw): - if self._idle_manager: - self._idle_manager.start() - - def tray_exit(self): - if self._idle_manager: - self._idle_manager.stop() - self._idle_manager.wait() - - def get_timer_data_for_path(self, task_path): - """Convert string path to a timer data. - - It is expected that first item is project name, last item is task name - and folder path in the middle. 
- """ - path_items = task_path.split("/") - task_name = path_items.pop(-1) - project_name = path_items.pop(0) - folder_path = "/" + "/".join(path_items) - return self.get_timer_data_for_context( - project_name, folder_path, task_name, self.log - ) - - def get_launch_hook_paths(self): - """Implementation for applications launch hooks.""" - - return [ - os.path.join(TIMER_MODULE_DIR, "launch_hooks") - ] - - def get_plugin_paths(self): - """Implementation of `IPluginPaths`.""" - - return { - "publish": [os.path.join(TIMER_MODULE_DIR, "plugins", "publish")] - } - - @staticmethod - def get_timer_data_for_context( - project_name, folder_path, task_name, logger=None - ): - """Prepare data for timer related callbacks.""" - if not project_name or not folder_path or not task_name: - raise InvalidContextError(( - "Missing context information got" - " Project: \"{}\" Folder: \"{}\" Task: \"{}\"" - ).format(str(project_name), str(folder_path), str(task_name))) - - folder_entity = ayon_api.get_folder_by_path( - project_name, - folder_path, - fields={"id", "name", "path"} - ) - - if not folder_entity: - raise InvalidContextError(( - "Folder \"{}\" not found in project \"{}\"" - ).format(folder_path, project_name)) - - folder_id = folder_entity["id"] - task_entity = ayon_api.get_task_by_name( - project_name, folder_id, task_name - ) - if not task_entity: - raise InvalidContextError(( - "Task \"{}\" not found on folder \"{}\" in project \"{}\"" - ).format(task_name, folder_path, project_name)) - - task_type = "" - try: - task_type = task_entity["taskType"] - except KeyError: - msg = "Couldn't find task_type for {}".format(task_name) - if logger is not None: - logger.warning(msg) - else: - print(msg) - - hierarchy_items = folder_entity["path"].split("/") - hierarchy_items.pop(0) - - return { - "project_name": project_name, - "folder_id": folder_id, - "folder_path": folder_entity["path"], - "task_name": task_name, - "task_type": task_type, - "asset_id": folder_id, - "asset_name": folder_entity["name"], - "hierarchy": hierarchy_items, - } - - def start_timer(self, project_name, folder_path, task_name): - """Start timer for passed context. - - Args: - project_name (str): Project name. - folder_path (str): Folder path. - task_name (str): Task name. - """ - data = self.get_timer_data_for_context( - project_name, folder_path, task_name, self.log - ) - self.timer_started(None, data) - - def get_task_time(self, project_name, folder_path, task_name): - """Get total time for passed context. - - TODO: - - convert context to timer data - """ - times = {} - for module_id, connector in self._connectors_by_module_id.items(): - if hasattr(connector, "get_task_time"): - module = self._modules_by_id[module_id] - times[module.name] = connector.get_task_time( - project_name, folder_path, task_name - ) - return times - - def timer_started(self, source_id, data): - """Connector triggered that timer has started. - - New timer has started for context in data. - """ - for module_id, connector in self._connectors_by_module_id.items(): - if module_id == source_id: - continue - - try: - connector.start_timer(data) - except Exception: - self.log.info( - "Failed to start timer on connector {}".format( - str(connector) - ) - ) - - self.last_task = data - self.is_running = True - - def timer_stopped(self, source_id): - """Connector triggered that hist timer has stopped. - - Should stop all other timers. 
- - TODO: - - pass context for which timer has stopped to validate if timers are - same and valid - """ - for module_id, connector in self._connectors_by_module_id.items(): - if module_id == source_id: - continue - - try: - connector.stop_timer() - except Exception: - self.log.info( - "Failed to stop timer on connector {}".format( - str(connector) - ) - ) - - def restart_timers(self): - if self.last_task is not None: - self.timer_started(None, self.last_task) - - def stop_timers(self): - """Stop all timers.""" - if self.is_running is False: - return - - if self._widget_user_idle is not None: - self._widget_user_idle.set_timer_stopped() - self.is_running = False - - self.timer_stopped(None) - - def connect_with_addons(self, enabled_modules): - for module in enabled_modules: - connector = getattr(module, "timers_manager_connector", None) - if connector is None: - continue - - missing_methods = set() - for method_name in self._required_methods: - if not hasattr(connector, method_name): - missing_methods.add(method_name) - - if missing_methods: - joined = ", ".join( - ['"{}"'.format(name for name in missing_methods)] - ) - self.log.info(( - "Module \"{}\" has missing required methods {}." - ).format(module.name, joined)) - continue - - self._connectors_by_module_id[module.id] = connector - self._modules_by_id[module.id] = module - - # Optional method - if hasattr(connector, "register_timers_manager"): - try: - connector.register_timers_manager(self) - except Exception: - self.log.info(( - "Failed to register timers manager" - " for connector of module \"{}\"." - ).format(module.name)) - - def show_message(self): - if self.is_running is False: - return - if not self._widget_user_idle.is_showed(): - self._widget_user_idle.reset_countdown() - self._widget_user_idle.show() - - # Webserver module implementation - def webserver_initialization(self, server_manager): - """Add routes for timers to be able start/stop with rest api.""" - if self.tray_initialized: - from .rest_api import TimersManagerModuleRestApi - self.rest_api_obj = TimersManagerModuleRestApi( - self, server_manager - ) - - @staticmethod - def start_timer_with_webserver( - project_name, folder_path, task_name, logger=None - ): - """Prepared method for calling change timers on REST api. - - Webserver must be active. At the moment is Webserver running only when - OpenPype Tray is used. - - Args: - project_name (str): Project name. - folder_path (str): Folder path. - task_name (str): Task name. - logger (logging.Logger): Logger object. Using 'print' if not - passed. - """ - - webserver_url = os.environ.get("AYON_WEBSERVER_URL") - if not webserver_url: - msg = "Couldn't find webserver url" - if logger is not None: - logger.warning(msg) - else: - print(msg) - return - - rest_api_url = "{}/timers_manager/start_timer".format(webserver_url) - try: - import requests - except Exception: - msg = "Couldn't start timer ('requests' is not available)" - if logger is not None: - logger.warning(msg) - else: - print(msg) - return - data = { - "project_name": project_name, - "folder_path": folder_path, - "task_name": task_name - } - - return requests.post(rest_api_url, json=data) - - @staticmethod - def stop_timer_with_webserver(logger=None): - """Prepared method for calling stop timers on REST api. - - Args: - logger (logging.Logger): Logger used for logging messages. 
- """ - - webserver_url = os.environ.get("AYON_WEBSERVER_URL") - if not webserver_url: - msg = "Couldn't find webserver url" - if logger is not None: - logger.warning(msg) - else: - print(msg) - return - - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - try: - import requests - except Exception: - msg = "Couldn't start timer ('requests' is not available)" - if logger is not None: - logger.warning(msg) - else: - print(msg) - return - - return requests.post(rest_api_url) - - def on_host_install(self, host, host_name, project_name): - self.log.debug("Installing task changed callback") - register_event_callback("taskChanged", self._on_host_task_change) - - def _on_host_task_change(self, event): - project_name = event["project_name"] - folder_path = event["folder_path"] - task_name = event["task_name"] - self.log.debug(( - "Sending message that timer should change to" - " Project: {} Folder: {} Task: {}" - ).format(project_name, folder_path, task_name)) - - self.start_timer_with_webserver( - project_name, folder_path, task_name, self.log - ) diff --git a/server_addon/timers_manager/client/ayon_timers_manager/version.py b/server_addon/timers_manager/client/ayon_timers_manager/version.py deleted file mode 100644 index 95e413aaac..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'timers_manager' version.""" -__version__ = "0.2.0" diff --git a/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py b/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py deleted file mode 100644 index c59ab15b38..0000000000 --- a/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py +++ /dev/null @@ -1,196 +0,0 @@ -from qtpy import QtCore, QtGui, QtWidgets -from ayon_core import resources, style - - -class WidgetUserIdle(QtWidgets.QWidget): - SIZE_W = 300 - SIZE_H = 160 - - def __init__(self, module): - super(WidgetUserIdle, self).__init__() - - self.setWindowTitle("AYON - Stop timers") - - icon = QtGui.QIcon(resources.get_ayon_icon_filepath()) - self.setWindowIcon(icon) - - self.setWindowFlags( - QtCore.Qt.WindowCloseButtonHint - | QtCore.Qt.WindowMinimizeButtonHint - | QtCore.Qt.WindowStaysOnTopHint - ) - - self._is_showed = False - self._timer_stopped = False - self._countdown = 0 - self._countdown_start = 0 - - self.module = module - - msg_info = "You didn't work for a long time." - msg_question = "Would you like to stop Timers?" - msg_stopped = ( - "Your Timers were stopped. Do you want to start them again?" 
diff --git a/server_addon/timers_manager/client/ayon_timers_manager/version.py b/server_addon/timers_manager/client/ayon_timers_manager/version.py
deleted file mode 100644
index 95e413aaac..0000000000
--- a/server_addon/timers_manager/client/ayon_timers_manager/version.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Package declaring AYON addon 'timers_manager' version."""
-__version__ = "0.2.0"
diff --git a/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py b/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py
deleted file mode 100644
index c59ab15b38..0000000000
--- a/server_addon/timers_manager/client/ayon_timers_manager/widget_user_idle.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from qtpy import QtCore, QtGui, QtWidgets
-from ayon_core import resources, style
-
-
-class WidgetUserIdle(QtWidgets.QWidget):
-    SIZE_W = 300
-    SIZE_H = 160
-
-    def __init__(self, module):
-        super(WidgetUserIdle, self).__init__()
-
-        self.setWindowTitle("AYON - Stop timers")
-
-        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
-        self.setWindowIcon(icon)
-
-        self.setWindowFlags(
-            QtCore.Qt.WindowCloseButtonHint
-            | QtCore.Qt.WindowMinimizeButtonHint
-            | QtCore.Qt.WindowStaysOnTopHint
-        )
-
-        self._is_showed = False
-        self._timer_stopped = False
-        self._countdown = 0
-        self._countdown_start = 0
-
-        self.module = module
-
-        msg_info = "You didn't work for a long time."
-        msg_question = "Would you like to stop Timers?"
-        msg_stopped = (
-            "Your Timers were stopped. Do you want to start them again?"
-        )
-
-        lbl_info = QtWidgets.QLabel(msg_info, self)
-        lbl_info.setTextFormat(QtCore.Qt.RichText)
-        lbl_info.setWordWrap(True)
-
-        lbl_question = QtWidgets.QLabel(msg_question, self)
-        lbl_question.setTextFormat(QtCore.Qt.RichText)
-        lbl_question.setWordWrap(True)
-
-        lbl_stopped = QtWidgets.QLabel(msg_stopped, self)
-        lbl_stopped.setTextFormat(QtCore.Qt.RichText)
-        lbl_stopped.setWordWrap(True)
-
-        lbl_rest_time = QtWidgets.QLabel(self)
-        lbl_rest_time.setTextFormat(QtCore.Qt.RichText)
-        lbl_rest_time.setWordWrap(True)
-        lbl_rest_time.setAlignment(QtCore.Qt.AlignCenter)
-
-        form = QtWidgets.QFormLayout()
-        form.setContentsMargins(10, 15, 10, 5)
-
-        form.addRow(lbl_info)
-        form.addRow(lbl_question)
-        form.addRow(lbl_stopped)
-        form.addRow(lbl_rest_time)
-
-        btn_stop = QtWidgets.QPushButton("Stop timer", self)
-        btn_stop.setToolTip("Stop's All timers")
-
-        btn_continue = QtWidgets.QPushButton("Continue", self)
-        btn_continue.setToolTip("Timer won't stop")
-
-        btn_close = QtWidgets.QPushButton("Close", self)
-        btn_close.setToolTip("Close window")
-
-        btn_restart = QtWidgets.QPushButton("Start timers", self)
-        btn_restart.setToolTip("Timer will be started again")
-
-        group_layout = QtWidgets.QHBoxLayout()
-        group_layout.addStretch(1)
-        group_layout.addWidget(btn_continue)
-        group_layout.addWidget(btn_stop)
-        group_layout.addWidget(btn_restart)
-        group_layout.addWidget(btn_close)
-
-        layout = QtWidgets.QVBoxLayout(self)
-        layout.addLayout(form)
-        layout.addLayout(group_layout)
-
-        count_timer = QtCore.QTimer()
-        count_timer.setInterval(1000)
-
-        btn_stop.clicked.connect(self._on_stop_clicked)
-        btn_continue.clicked.connect(self._on_continue_clicked)
-        btn_close.clicked.connect(self._close_widget)
-        btn_restart.clicked.connect(self._on_restart_clicked)
-        count_timer.timeout.connect(self._on_count_timeout)
-
-        self.lbl_info = lbl_info
-        self.lbl_question = lbl_question
-        self.lbl_stopped = lbl_stopped
-        self.lbl_rest_time = lbl_rest_time
-
-        self.btn_stop = btn_stop
-        self.btn_continue = btn_continue
-        self.btn_close = btn_close
-        self.btn_restart = btn_restart
-
-        self._count_timer = count_timer
-
-        self.resize(self.SIZE_W, self.SIZE_H)
-        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
-        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
-        self.setStyleSheet(style.load_stylesheet())
-
-    def set_countdown_start(self, countdown):
-        self._countdown_start = countdown
-        if not self.is_showed():
-            self.reset_countdown()
-
-    def reset_countdown(self):
-        self._countdown = self._countdown_start
-        self._update_countdown_label()
-
-    def is_showed(self):
-        return self._is_showed
-
-    def set_timer_stopped(self):
-        self._timer_stopped = True
-        self._refresh_context()
-
-    def _update_countdown_label(self):
-        self.lbl_rest_time.setText(str(self._countdown))
-
-    def _on_count_timeout(self):
-        if self._timer_stopped or not self._is_showed:
-            self._count_timer.stop()
-            return
-
-        if self._countdown <= 0:
-            self._stop_timers()
-            self.set_timer_stopped()
-        else:
-            self._countdown -= 1
-            self._update_countdown_label()
-
-    def _refresh_context(self):
-        self.lbl_question.setVisible(not self._timer_stopped)
-        self.lbl_rest_time.setVisible(not self._timer_stopped)
-        self.lbl_stopped.setVisible(self._timer_stopped)
-
-        self.btn_continue.setVisible(not self._timer_stopped)
-        self.btn_stop.setVisible(not self._timer_stopped)
-        self.btn_restart.setVisible(self._timer_stopped)
-        self.btn_close.setVisible(self._timer_stopped)
-
-    def _stop_timers(self):
-        self.module.stop_timers()
-
-    def _on_stop_clicked(self):
-        self._stop_timers()
-        self._close_widget()
-
-    def _on_restart_clicked(self):
-        self.module.restart_timers()
-        self._close_widget()
-
-    def _on_continue_clicked(self):
-        self._close_widget()
-
-    def _close_widget(self):
-        self._is_showed = False
-        self._timer_stopped = False
-        self._refresh_context()
-        self.hide()
-
-    def showEvent(self, event):
-        if not self._is_showed:
-            self._is_showed = True
-            self._refresh_context()
-
-        if not self._count_timer.isActive():
-            self._count_timer.start()
-        super(WidgetUserIdle, self).showEvent(event)
-
-    def closeEvent(self, event):
-        event.ignore()
-        if self._timer_stopped:
-            self._close_widget()
-        else:
-            self._on_continue_clicked()
-
-
-class SignalHandler(QtCore.QObject):
-    signal_show_message = QtCore.Signal()
-    signal_stop_timers = QtCore.Signal()
-
-    def __init__(self, module):
-        super(SignalHandler, self).__init__()
-        self.module = module
-        self.signal_show_message.connect(module.show_message)
-        self.signal_stop_timers.connect(module.stop_timers)
diff --git a/server_addon/timers_manager/client/pyproject.toml b/server_addon/timers_manager/client/pyproject.toml
deleted file mode 100644
index 364fb33712..0000000000
--- a/server_addon/timers_manager/client/pyproject.toml
+++ /dev/null
@@ -1,6 +0,0 @@
-[project]
-name="timers_manager"
-description="AYON TimersManager addon."
-
-[ayon.runtimeDependencies]
-pynput = "^1.7.2"
\ No newline at end of file
diff --git a/server_addon/timers_manager/package.py b/server_addon/timers_manager/package.py
deleted file mode 100644
index 32dc7cfbf4..0000000000
--- a/server_addon/timers_manager/package.py
+++ /dev/null
@@ -1,10 +0,0 @@
-name = "timers_manager"
-title = "Timers Manager"
-version = "0.2.0"
-
-client_dir = "ayon_timers_manager"
-
-ayon_required_addons = {
-    "core": ">0.3.2",
-}
-ayon_compatible_addons = {}
diff --git a/server_addon/timers_manager/server/__init__.py b/server_addon/timers_manager/server/__init__.py
deleted file mode 100644
index 32e83d295c..0000000000
--- a/server_addon/timers_manager/server/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from typing import Type
-
-from ayon_server.addons import BaseServerAddon
-
-from .settings import TimersManagerSettings
-
-
-class TimersManagerAddon(BaseServerAddon):
-    settings_model: Type[TimersManagerSettings] = TimersManagerSettings
diff --git a/server_addon/timers_manager/server/settings.py b/server_addon/timers_manager/server/settings.py
deleted file mode 100644
index 774940730c..0000000000
--- a/server_addon/timers_manager/server/settings.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from ayon_server.settings import BaseSettingsModel, SettingsField
-
-
-class TimersManagerSettings(BaseSettingsModel):
-    auto_stop: bool = SettingsField(
-        True,
-        title="Auto stop timer",
-        scope=["studio"],
-    )
-    full_time: int = SettingsField(
-        15,
-        title="Max idle time",
-        scope=["studio"],
-    )
-    message_time: float = SettingsField(
-        0.5,
-        title="When dialog will show",
-        scope=["studio"],
-    )
-    disregard_publishing: bool = SettingsField(
-        False,
-        title="Disregard publishing",
-        scope=["studio"],
-    )
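For reference, the removed `initialize` converted these settings from minutes to seconds; with the defaults above (`full_time=15`, `message_time=0.5`) the idle dialog appeared after 14.5 minutes of inactivity and counted down for 30 seconds before stopping the timers. The arithmetic below simply restates that deleted logic with the default values:

```python
full_time = int(15 * 60)        # 900 s of idle time before timers stop
message_time = int(0.5 * 60)    # 30 s of warning before that happens

time_show_message = full_time - message_time   # dialog shows at 870 s idle
countdown = full_time - time_show_message      # 30 s countdown in the dialog
```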