diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py
index 8c36719b77..4fb8b886a9 100644
--- a/client/ayon_core/plugins/publish/integrate_hero_version.py
+++ b/client/ayon_core/plugins/publish/integrate_hero_version.py
@@ -87,7 +87,9 @@ class IntegrateHeroVersion(
]
# QUESTION/TODO this process should happen on server if crashed due to
# permissions error on files (files were used or user didn't have perms)
- # *but all other plugins must be sucessfully completed
+ # *but all other plugins must be successfully completed
+
+ use_hardlinks = False
def process(self, instance):
if not self.is_active(instance.data):
@@ -617,24 +619,32 @@ def copy_file(self, src_path, dst_path):
self.log.debug("Folder already exists: \"{}\"".format(dirname))
+ if self.use_hardlinks:
+ # First try hardlink and copy if paths are cross drive
+ self.log.debug("Hardlinking file \"{}\" to \"{}\"".format(
+ src_path, dst_path
+ ))
+ try:
+ create_hard_link(src_path, dst_path)
+ # Return when successful
+ return
+
+ except OSError as exc:
+ # re-raise exception if different than
+ # EXDEV - cross drive path
+ # EINVAL - wrong format, must be NTFS
+ self.log.debug(
+ "Hardlink failed with errno:'{}'".format(exc.errno))
+ if exc.errno not in [errno.EXDEV, errno.EINVAL]:
+ raise
+
+ self.log.debug(
+ "Hardlinking failed, falling back to regular copy...")
+
self.log.debug("Copying file \"{}\" to \"{}\"".format(
src_path, dst_path
))
- # First try hardlink and copy if paths are cross drive
- try:
- create_hard_link(src_path, dst_path)
- # Return when successful
- return
-
- except OSError as exc:
- # re-raise exception if different than
- # EXDEV - cross drive path
- # EINVAL - wrong format, must be NTFS
- self.log.debug("Hardlink failed with errno:'{}'".format(exc.errno))
- if exc.errno not in [errno.EXDEV, errno.EINVAL]:
- raise
-
shutil.copy(src_path, dst_path)
def version_from_representations(self, project_name, repres):
diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py
index b37be1afe6..1b3d382f01 100644
--- a/server/settings/publish_plugins.py
+++ b/server/settings/publish_plugins.py
@@ -743,6 +743,14 @@ class IntegrateHeroVersionModel(BaseSettingsModel):
optional: bool = SettingsField(False, title="Optional")
active: bool = SettingsField(True, title="Active")
families: list[str] = SettingsField(default_factory=list, title="Families")
+ use_hardlinks: bool = SettingsField(
+ False, title="Use Hardlinks",
+ description="When enabled first try to make a hardlink of the version "
+ "instead of a copy. This helps reduce disk usage, but may "
+ "create issues.\nFor example there are known issues on "
+ "Windows being unable to delete any of the hardlinks if "
+ "any of the links is in use creating issues with updating "
+ "hero versions.")
class CleanUpModel(BaseSettingsModel):
@@ -1136,7 +1144,8 @@ class PublishPuginsModel(BaseSettingsModel):
"layout",
"mayaScene",
"simpleUnrealTexture"
- ]
+ ],
+ "use_hardlinks": False
},
"CleanUp": {
"paterns": [],
diff --git a/server_addon/celaction/client/ayon_celaction/__init__.py b/server_addon/celaction/client/ayon_celaction/__init__.py
deleted file mode 100644
index 0df0224125..0000000000
--- a/server_addon/celaction/client/ayon_celaction/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from .version import __version__
-from .addon import (
- CELACTION_ROOT_DIR,
- CelactionAddon,
-)
-
-
-__all__ = (
- "__version__",
-
- "CELACTION_ROOT_DIR",
- "CelactionAddon",
-)
diff --git a/server_addon/celaction/client/ayon_celaction/addon.py b/server_addon/celaction/client/ayon_celaction/addon.py
deleted file mode 100644
index ad04a54088..0000000000
--- a/server_addon/celaction/client/ayon_celaction/addon.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-from ayon_core.addon import AYONAddon, IHostAddon
-
-from .version import __version__
-
-CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
-
-
-class CelactionAddon(AYONAddon, IHostAddon):
- name = "celaction"
- version = __version__
- host_name = "celaction"
-
- def get_launch_hook_paths(self, app):
- if app.host_name != self.host_name:
- return []
- return [
- os.path.join(CELACTION_ROOT_DIR, "hooks")
- ]
-
- def add_implementation_envs(self, env, _app):
- # Set default values if are not already set via settings
- defaults = {
- "LOGLEVEL": "DEBUG"
- }
- for key, value in defaults.items():
- if not env.get(key):
- env[key] = value
-
- def get_workfile_extensions(self):
- return [".scn"]
diff --git a/server_addon/celaction/client/ayon_celaction/hooks/pre_celaction_setup.py b/server_addon/celaction/client/ayon_celaction/hooks/pre_celaction_setup.py
deleted file mode 100644
index 52622d43b8..0000000000
--- a/server_addon/celaction/client/ayon_celaction/hooks/pre_celaction_setup.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import os
-import shutil
-import winreg
-import subprocess
-from ayon_core.lib import get_ayon_launcher_args
-from ayon_applications import PreLaunchHook, LaunchTypes
-from ayon_celaction import CELACTION_ROOT_DIR
-
-
-class CelactionPrelaunchHook(PreLaunchHook):
- """Bootstrap celacion with AYON"""
- app_groups = {"celaction"}
- platforms = {"windows"}
- launch_types = {LaunchTypes.local}
-
- def execute(self):
- folder_attributes = self.data["folder_entity"]["attrib"]
- width = folder_attributes["resolutionWidth"]
- height = folder_attributes["resolutionHeight"]
-
- # Add workfile path to launch arguments
- workfile_path = self.workfile_path()
- if workfile_path:
- self.launch_context.launch_args.append(workfile_path)
-
- # setting output parameters
- path_user_settings = "\\".join([
- "Software", "CelAction", "CelAction2D", "User Settings"
- ])
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings)
- hKey = winreg.OpenKey(
- winreg.HKEY_CURRENT_USER, path_user_settings, 0,
- winreg.KEY_ALL_ACCESS
- )
-
- path_to_cli = os.path.join(
- CELACTION_ROOT_DIR, "scripts", "publish_cli.py"
- )
- subprocess_args = get_ayon_launcher_args("run", path_to_cli)
- executable = subprocess_args.pop(0)
- workfile_settings = self.get_workfile_settings()
-
- winreg.SetValueEx(
- hKey,
- "SubmitAppTitle",
- 0,
- winreg.REG_SZ,
- executable
- )
-
- # add required arguments for workfile path
- parameters = subprocess_args + [
- "--currentFile", "*SCENE*"
- ]
-
- # Add custom parameters from workfile settings
- if "render_chunk" in workfile_settings["submission_overrides"]:
- parameters += [
- "--chunk", "*CHUNK*"
- ]
- if "resolution" in workfile_settings["submission_overrides"]:
- parameters += [
- "--resolutionWidth", "*X*",
- "--resolutionHeight", "*Y*"
- ]
- if "frame_range" in workfile_settings["submission_overrides"]:
- parameters += [
- "--frameStart", "*START*",
- "--frameEnd", "*END*"
- ]
-
- winreg.SetValueEx(
- hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
- subprocess.list2cmdline(parameters)
- )
-
- self.log.debug(f"__ parameters: \"{parameters}\"")
-
- # setting resolution parameters
- path_submit = "\\".join([
- path_user_settings, "Dialogs", "SubmitOutput"
- ])
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit)
- hKey = winreg.OpenKey(
- winreg.HKEY_CURRENT_USER, path_submit, 0,
- winreg.KEY_ALL_ACCESS
- )
- winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
- winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width)
- winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height)
-
- # making sure message dialogs don't appear when overwriting
- path_overwrite_scene = "\\".join([
- path_user_settings, "Messages", "OverwriteScene"
- ])
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene)
- hKey = winreg.OpenKey(
- winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0,
- winreg.KEY_ALL_ACCESS
- )
- winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
- winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
- # set scane as not saved
- path_scene_saved = "\\".join([
- path_user_settings, "Messages", "SceneSaved"
- ])
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved)
- hKey = winreg.OpenKey(
- winreg.HKEY_CURRENT_USER, path_scene_saved, 0,
- winreg.KEY_ALL_ACCESS
- )
- winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
- winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
- def workfile_path(self):
- workfile_path = self.data["last_workfile_path"]
-
- # copy workfile from template if doesn't exist any on path
- if not os.path.exists(workfile_path):
- # TODO add ability to set different template workfile path via
- # settings
- template_path = os.path.join(
- CELACTION_ROOT_DIR,
- "resources",
- "celaction_template_scene.scn"
- )
-
- if not os.path.exists(template_path):
- self.log.warning(
- "Couldn't find workfile template file in {}".format(
- template_path
- )
- )
- return
-
- self.log.info(
- f"Creating workfile from template: \"{template_path}\""
- )
-
- # Copy template workfile to new destinantion
- shutil.copy2(
- os.path.normpath(template_path),
- os.path.normpath(workfile_path)
- )
-
- self.log.info(f"Workfile to open: \"{workfile_path}\"")
-
- return workfile_path
-
- def get_workfile_settings(self):
- return self.data["project_settings"]["celaction"]["workfile"]
diff --git a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_cli_kwargs.py b/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_cli_kwargs.py
deleted file mode 100644
index 1820569918..0000000000
--- a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_cli_kwargs.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import pyblish.api
-import sys
-from pprint import pformat
-
-
-class CollectCelactionCliKwargs(pyblish.api.ContextPlugin):
- """ Collects all keyword arguments passed from the terminal """
-
- label = "Collect Celaction Cli Kwargs"
- order = pyblish.api.CollectorOrder - 0.1
-
- def process(self, context):
- args = list(sys.argv[1:])
- self.log.info(str(args))
- missing_kwargs = []
- passing_kwargs = {}
- for key in (
- "chunk",
- "frameStart",
- "frameEnd",
- "resolutionWidth",
- "resolutionHeight",
- "currentFile",
- ):
- arg_key = f"--{key}"
- if arg_key not in args:
- missing_kwargs.append(key)
- continue
- arg_idx = args.index(arg_key)
- args.pop(arg_idx)
- if key != "currentFile":
- value = args.pop(arg_idx)
- else:
- path_parts = []
- while arg_idx < len(args):
- path_parts.append(args.pop(arg_idx))
- value = " ".join(path_parts).strip('"')
-
- passing_kwargs[key] = value
-
- if missing_kwargs:
- self.log.debug("Missing arguments {}".format(
- ", ".join(
- [f'"{key}"' for key in missing_kwargs]
- )
- ))
-
- self.log.info("Storing kwargs ...")
- self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))
-
- # set kwargs to context data
- context.set_data("passingKwargs", passing_kwargs)
-
- # get kwargs onto context data as keys with values
- for k, v in passing_kwargs.items():
- self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
- if k in ["frameStart", "frameEnd"]:
- context.data[k] = passing_kwargs[k] = int(v)
- else:
- context.data[k] = v
diff --git a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_instances.py b/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_instances.py
deleted file mode 100644
index 7c22201e3e..0000000000
--- a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_celaction_instances.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import os
-import pyblish.api
-
-
-class CollectCelactionInstances(pyblish.api.ContextPlugin):
- """ Adds the celaction render instances """
-
- label = "Collect Celaction Instances"
- order = pyblish.api.CollectorOrder + 0.1
-
- def process(self, context):
- task = context.data["task"]
- current_file = context.data["currentFile"]
- staging_dir = os.path.dirname(current_file)
- scene_file = os.path.basename(current_file)
- version = context.data["version"]
-
- folder_entity = context.data["folderEntity"]
-
- folder_attributes = folder_entity["attrib"]
-
- shared_instance_data = {
- "folderPath": folder_entity["path"],
- "frameStart": folder_attributes["frameStart"],
- "frameEnd": folder_attributes["frameEnd"],
- "handleStart": folder_attributes["handleStart"],
- "handleEnd": folder_attributes["handleEnd"],
- "fps": folder_attributes["fps"],
- "resolutionWidth": folder_attributes["resolutionWidth"],
- "resolutionHeight": folder_attributes["resolutionHeight"],
- "pixelAspect": 1,
- "step": 1,
- "version": version
- }
-
- celaction_kwargs = context.data.get(
- "passingKwargs", {})
-
- if celaction_kwargs:
- shared_instance_data.update(celaction_kwargs)
-
- # workfile instance
- product_type = "workfile"
- product_name = product_type + task.capitalize()
- # Create instance
- instance = context.create_instance(product_name)
-
- # creating instance data
- instance.data.update({
- "label": scene_file,
- "productName": product_name,
- "productType": product_type,
- "family": product_type,
- "families": [product_type],
- "representations": []
- })
-
- # adding basic script data
- instance.data.update(shared_instance_data)
-
- # creating representation
- representation = {
- 'name': 'scn',
- 'ext': 'scn',
- 'files': scene_file,
- "stagingDir": staging_dir,
- }
-
- instance.data["representations"].append(representation)
-
- self.log.info('Publishing Celaction workfile')
-
- # render instance
- product_name = f"render{task}Main"
- product_type = "render.farm"
- instance = context.create_instance(name=product_name)
- # getting instance state
- instance.data["publish"] = True
-
- # add folderEntity data into instance
- instance.data.update({
- "label": "{} - farm".format(product_name),
- "productType": product_type,
- "family": product_type,
- "families": [product_type],
- "productName": product_name
- })
-
- # adding basic script data
- instance.data.update(shared_instance_data)
-
- self.log.info('Publishing Celaction render instance')
- self.log.debug(f"Instance data: `{instance.data}`")
-
- for i in context:
- self.log.debug(f"{i.data['families']}")
diff --git a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_render_path.py b/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_render_path.py
deleted file mode 100644
index 3bcd1c69b3..0000000000
--- a/server_addon/celaction/client/ayon_celaction/plugins/publish/collect_render_path.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import os
-import copy
-import pyblish.api
-
-
-class CollectRenderPath(pyblish.api.InstancePlugin):
- """Generate file and directory path where rendered images will be"""
-
- label = "Collect Render Path"
- order = pyblish.api.CollectorOrder + 0.495
- families = ["render.farm"]
-
- settings_category = "celaction"
-
- # Presets
- output_extension = "png"
- anatomy_template_key_render_files = None
- anatomy_template_key_metadata = None
-
- def process(self, instance):
- anatomy = instance.context.data["anatomy"]
- anatomy_data = copy.deepcopy(instance.data["anatomyData"])
- padding = anatomy.templates_obj.frame_padding
- product_type = "render"
- anatomy_data.update({
- "frame": f"%0{padding}d",
- "family": product_type,
- "representation": self.output_extension,
- "ext": self.output_extension
- })
- anatomy_data["product"]["type"] = product_type
-
- # get anatomy rendering keys
- r_anatomy_key = self.anatomy_template_key_render_files
- m_anatomy_key = self.anatomy_template_key_metadata
-
- # get folder and path for rendering images from celaction
- r_template_item = anatomy.get_template_item("publish", r_anatomy_key)
- render_dir = r_template_item["directory"].format_strict(anatomy_data)
- render_path = r_template_item["path"].format_strict(anatomy_data)
- self.log.debug("__ render_path: `{}`".format(render_path))
-
- # create dir if it doesn't exists
- try:
- if not os.path.isdir(render_dir):
- os.makedirs(render_dir, exist_ok=True)
- except OSError:
- # directory is not available
- self.log.warning("Path is unreachable: `{}`".format(render_dir))
-
- # add rendering path to instance data
- instance.data["path"] = render_path
-
- # get anatomy for published renders folder path
- m_template_item = anatomy.get_template_item(
- "publish", m_anatomy_key, default=None
- )
- if m_template_item is not None:
- metadata_path = m_template_item["directory"].format_strict(
- anatomy_data
- )
- instance.data["publishRenderMetadataFolder"] = metadata_path
- self.log.info("Metadata render path: `{}`".format(metadata_path))
-
- self.log.info(f"Render output path set to: `{render_path}`")
diff --git a/server_addon/celaction/client/ayon_celaction/plugins/publish/integrate_version_up.py b/server_addon/celaction/client/ayon_celaction/plugins/publish/integrate_version_up.py
deleted file mode 100644
index c165b0c871..0000000000
--- a/server_addon/celaction/client/ayon_celaction/plugins/publish/integrate_version_up.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import shutil
-
-import pyblish.api
-
-from ayon_core.lib import version_up
-
-
-class VersionUpScene(pyblish.api.ContextPlugin):
- order = pyblish.api.IntegratorOrder + 0.5
- label = 'Version Up Scene'
- families = ['workfile']
- optional = True
- active = True
-
- def process(self, context):
- current_file = context.data.get('currentFile')
- v_up = version_up(current_file)
- self.log.debug('Current file is: {}'.format(current_file))
- self.log.debug('Version up: {}'.format(v_up))
-
- shutil.copy2(current_file, v_up)
- self.log.info('Scene saved into new version: {}'.format(v_up))
diff --git a/server_addon/celaction/client/ayon_celaction/resources/celaction_template_scene.scn b/server_addon/celaction/client/ayon_celaction/resources/celaction_template_scene.scn
deleted file mode 100644
index 54e4497a31..0000000000
Binary files a/server_addon/celaction/client/ayon_celaction/resources/celaction_template_scene.scn and /dev/null differ
diff --git a/server_addon/celaction/client/ayon_celaction/scripts/__init__.py b/server_addon/celaction/client/ayon_celaction/scripts/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/server_addon/celaction/client/ayon_celaction/scripts/publish_cli.py b/server_addon/celaction/client/ayon_celaction/scripts/publish_cli.py
deleted file mode 100644
index 4e54aa253a..0000000000
--- a/server_addon/celaction/client/ayon_celaction/scripts/publish_cli.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import os
-import sys
-
-import pyblish.api
-import pyblish.util
-
-from ayon_celaction import CELACTION_ROOT_DIR
-from ayon_core.lib import Logger
-from ayon_core.tools.utils import host_tools
-from ayon_core.pipeline import install_ayon_plugins
-
-
-log = Logger.get_logger("celaction")
-
-PUBLISH_HOST = "celaction"
-PLUGINS_DIR = os.path.join(CELACTION_ROOT_DIR, "plugins")
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
-
-
-def main():
- # Registers global pyblish plugins
- install_ayon_plugins()
-
- if os.path.exists(PUBLISH_PATH):
- log.info(f"Registering path: {PUBLISH_PATH}")
- pyblish.api.register_plugin_path(PUBLISH_PATH)
-
- pyblish.api.register_host(PUBLISH_HOST)
- pyblish.api.register_target("local")
-
- return host_tools.show_publish()
-
-
-if __name__ == "__main__":
- result = main()
- sys.exit(not bool(result))
diff --git a/server_addon/celaction/client/ayon_celaction/version.py b/server_addon/celaction/client/ayon_celaction/version.py
deleted file mode 100644
index ceed47c3a0..0000000000
--- a/server_addon/celaction/client/ayon_celaction/version.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Package declaring AYON addon 'celaction' version."""
-__version__ = "0.2.0"
diff --git a/server_addon/celaction/package.py b/server_addon/celaction/package.py
deleted file mode 100644
index 8b9069d019..0000000000
--- a/server_addon/celaction/package.py
+++ /dev/null
@@ -1,12 +0,0 @@
-name = "celaction"
-title = "CelAction"
-version = "0.2.0"
-
-client_dir = "ayon_celaction"
-
-ayon_required_addons = {
- "core": ">0.3.2",
-}
-ayon_compatible_addons = {
- "applications": ">=0.2.0",
-}
diff --git a/server_addon/celaction/server/__init__.py b/server_addon/celaction/server/__init__.py
deleted file mode 100644
index e3769a4b7f..0000000000
--- a/server_addon/celaction/server/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from typing import Type
-
-from ayon_server.addons import BaseServerAddon
-
-from .settings import CelActionSettings, DEFAULT_VALUES
-
-
-class CelActionAddon(BaseServerAddon):
- settings_model: Type[CelActionSettings] = CelActionSettings
-
- async def get_default_settings(self):
- settings_model_cls = self.get_settings_model()
- return settings_model_cls(**DEFAULT_VALUES)
diff --git a/server_addon/celaction/server/imageio.py b/server_addon/celaction/server/imageio.py
deleted file mode 100644
index e0e685a244..0000000000
--- a/server_addon/celaction/server/imageio.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from pydantic import validator
-from ayon_server.settings import BaseSettingsModel, SettingsField
-from ayon_server.settings.validators import ensure_unique_names
-
-
-class ImageIOConfigModel(BaseSettingsModel):
- """[DEPRECATED] Addon OCIO config settings. Please set the OCIO config
- path in the Core addon profiles here
- (ayon+settings://core/imageio/ocio_config_profiles).
- """
-
- override_global_config: bool = SettingsField(
- False,
- title="Override global OCIO config",
- description=(
- "DEPRECATED functionality. Please set the OCIO config path in the "
- "Core addon profiles here (ayon+settings://core/imageio/"
- "ocio_config_profiles)."
- ),
- )
- filepath: list[str] = SettingsField(
- default_factory=list,
- title="Config path",
- description=(
- "DEPRECATED functionality. Please set the OCIO config path in the "
- "Core addon profiles here (ayon+settings://core/imageio/"
- "ocio_config_profiles)."
- ),
- )
-
-
-class ImageIOFileRuleModel(BaseSettingsModel):
- name: str = SettingsField("", title="Rule name")
- pattern: str = SettingsField("", title="Regex pattern")
- colorspace: str = SettingsField("", title="Colorspace name")
- ext: str = SettingsField("", title="File extension")
-
-
-class ImageIOFileRulesModel(BaseSettingsModel):
- activate_host_rules: bool = SettingsField(False)
- rules: list[ImageIOFileRuleModel] = SettingsField(
- default_factory=list,
- title="Rules"
- )
-
- @validator("rules")
- def validate_unique_outputs(cls, value):
- ensure_unique_names(value)
- return value
-
-
-class CelActionImageIOModel(BaseSettingsModel):
- activate_host_color_management: bool = SettingsField(
- True, title="Enable Color Management"
- )
- ocio_config: ImageIOConfigModel = SettingsField(
- default_factory=ImageIOConfigModel,
- title="OCIO config"
- )
- file_rules: ImageIOFileRulesModel = SettingsField(
- default_factory=ImageIOFileRulesModel,
- title="File Rules"
- )
diff --git a/server_addon/celaction/server/settings.py b/server_addon/celaction/server/settings.py
deleted file mode 100644
index afa9773477..0000000000
--- a/server_addon/celaction/server/settings.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from ayon_server.settings import BaseSettingsModel, SettingsField
-from .imageio import CelActionImageIOModel
-
-
-class CollectRenderPathModel(BaseSettingsModel):
- output_extension: str = SettingsField(
- "",
- title="Output render file extension"
- )
- anatomy_template_key_render_files: str = SettingsField(
- "",
- title="Anatomy template key: render files"
- )
- anatomy_template_key_metadata: str = SettingsField(
- "",
- title="Anatomy template key: metadata job file"
- )
-
-
-def _workfile_submit_overrides():
- return [
- {
- "value": "render_chunk",
- "label": "Pass chunk size"
- },
- {
- "value": "frame_range",
- "label": "Pass frame range"
- },
- {
- "value": "resolution",
- "label": "Pass resolution"
- }
- ]
-
-
-class WorkfileModel(BaseSettingsModel):
- submission_overrides: list[str] = SettingsField(
- default_factory=list,
- title="Submission workfile overrides",
- enum_resolver=_workfile_submit_overrides
- )
-
-
-class PublishPluginsModel(BaseSettingsModel):
- CollectRenderPath: CollectRenderPathModel = SettingsField(
- default_factory=CollectRenderPathModel,
- title="Collect Render Path"
- )
-
-
-class CelActionSettings(BaseSettingsModel):
- imageio: CelActionImageIOModel = SettingsField(
- default_factory=CelActionImageIOModel,
- title="Color Management (ImageIO)"
- )
- workfile: WorkfileModel = SettingsField(
- title="Workfile"
- )
- publish: PublishPluginsModel = SettingsField(
- default_factory=PublishPluginsModel,
- title="Publish plugins",
- )
-
-
-DEFAULT_VALUES = {
- "imageio": {
- "ocio_config": {
- "enabled": False,
- "filepath": []
- },
- "file_rules": {
- "enabled": False,
- "rules": []
- }
- },
- "workfile": {
- "submission_overrides": [
- "render_chunk",
- "frame_range",
- "resolution"
- ]
- },
- "publish": {
- "CollectRenderPath": {
- "output_extension": "png",
- "anatomy_template_key_render_files": "render",
- "anatomy_template_key_metadata": "render"
- }
- }
-}
diff --git a/server_addon/clockify/client/ayon_clockify/__init__.py b/server_addon/clockify/client/ayon_clockify/__init__.py
deleted file mode 100644
index 75fb87494e..0000000000
--- a/server_addon/clockify/client/ayon_clockify/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from .addon import ClockifyAddon
-
-__all__ = (
- "ClockifyAddon",
-)
diff --git a/server_addon/clockify/client/ayon_clockify/addon.py b/server_addon/clockify/client/ayon_clockify/addon.py
deleted file mode 100644
index cf35e77ce4..0000000000
--- a/server_addon/clockify/client/ayon_clockify/addon.py
+++ /dev/null
@@ -1,290 +0,0 @@
-import os
-import threading
-import time
-
-from ayon_core.addon import AYONAddon, ITrayAddon, IPluginPaths
-
-from .version import __version__
-from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH
-
-
-class ClockifyAddon(AYONAddon, ITrayAddon, IPluginPaths):
- name = "clockify"
- version = __version__
-
- def initialize(self, studio_settings):
- enabled = self.name in studio_settings
- workspace_name = None
- if enabled:
- clockify_settings = studio_settings[self.name]
- workspace_name = clockify_settings["workspace_name"]
-
- if enabled and workspace_name:
- self.log.warning("Clockify Workspace is not set in settings.")
- enabled = False
- self.enabled = enabled
- self.workspace_name = workspace_name
-
- self.timer_manager = None
- self.MessageWidgetClass = None
- self.message_widget = None
- self._clockify_api = None
-
- # TimersManager attributes
- # - set `timers_manager_connector` only in `tray_init`
- self.timers_manager_connector = None
- self._timer_manager_addon = None
-
- @property
- def clockify_api(self):
- if self._clockify_api is None:
- from .clockify_api import ClockifyAPI
-
- self._clockify_api = ClockifyAPI(master_parent=self)
- return self._clockify_api
-
- def get_global_environments(self):
- return {"CLOCKIFY_WORKSPACE": self.workspace_name}
-
- def tray_init(self):
- from .widgets import ClockifySettings, MessageWidget
-
- self.MessageWidgetClass = MessageWidget
-
- self.message_widget = None
- self.widget_settings = ClockifySettings(self.clockify_api)
- self.widget_settings_required = None
-
- self.thread_timer_check = None
- # Bools
- self.bool_thread_check_running = False
- self.bool_api_key_set = False
- self.bool_workspace_set = False
- self.bool_timer_run = False
- self.bool_api_key_set = self.clockify_api.set_api()
-
- # Define itself as TimersManager connector
- self.timers_manager_connector = self
-
- def tray_start(self):
- if self.bool_api_key_set is False:
- self.show_settings()
- return
-
- self.bool_workspace_set = self.clockify_api.workspace_id is not None
- if self.bool_workspace_set is False:
- return
-
- self.start_timer_check()
- self.set_menu_visibility()
-
- def tray_exit(self, *_a, **_kw):
- return
-
- def get_plugin_paths(self):
- """Implementation of IPluginPaths to get plugin paths."""
- actions_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), "launcher_actions"
- )
- return {"actions": [actions_path]}
-
- def get_ftrack_event_handler_paths(self):
- """Function for ftrack addon to add ftrack event handler paths."""
- return {
- "user": [CLOCKIFY_FTRACK_USER_PATH],
- "server": [CLOCKIFY_FTRACK_SERVER_PATH],
- }
-
- def clockify_timer_stopped(self):
- self.bool_timer_run = False
- self.timer_stopped()
-
- def start_timer_check(self):
- self.bool_thread_check_running = True
- if self.thread_timer_check is None:
- self.thread_timer_check = threading.Thread(
- target=self.check_running
- )
- self.thread_timer_check.daemon = True
- self.thread_timer_check.start()
-
- def stop_timer_check(self):
- self.bool_thread_check_running = True
- if self.thread_timer_check is not None:
- self.thread_timer_check.join()
- self.thread_timer_check = None
-
- def check_running(self):
- while self.bool_thread_check_running is True:
- bool_timer_run = False
- if self.clockify_api.get_in_progress() is not None:
- bool_timer_run = True
-
- if self.bool_timer_run != bool_timer_run:
- if self.bool_timer_run is True:
- self.clockify_timer_stopped()
- elif self.bool_timer_run is False:
- current_timer = self.clockify_api.get_in_progress()
- if current_timer is None:
- continue
- current_proj_id = current_timer.get("projectId")
- if not current_proj_id:
- continue
-
- project = self.clockify_api.get_project_by_id(
- current_proj_id
- )
- if project and project.get("code") == 501:
- continue
-
- project_name = project.get("name")
-
- current_timer_hierarchy = current_timer.get("description")
- if not current_timer_hierarchy:
- continue
- hierarchy_items = current_timer_hierarchy.split("/")
- # Each pype timer must have at least 2 items!
- if len(hierarchy_items) < 2:
- continue
-
- task_name = hierarchy_items[-1]
- hierarchy = hierarchy_items[:-1]
-
- data = {
- "task_name": task_name,
- "hierarchy": hierarchy,
- "project_name": project_name,
- }
- self.timer_started(data)
-
- self.bool_timer_run = bool_timer_run
- self.set_menu_visibility()
- time.sleep(5)
-
- def signed_in(self):
- if not self.timer_manager:
- return
-
- if not self.timer_manager.last_task:
- return
-
- if self.timer_manager.is_running:
- self.start_timer_manager(self.timer_manager.last_task)
-
- def on_message_widget_close(self):
- self.message_widget = None
-
- # Definition of Tray menu
- def tray_menu(self, parent_menu):
- # Menu for Tray App
- from qtpy import QtWidgets
-
- menu = QtWidgets.QMenu("Clockify", parent_menu)
- menu.setProperty("submenu", "on")
-
- # Actions
- action_show_settings = QtWidgets.QAction("Settings", menu)
- action_stop_timer = QtWidgets.QAction("Stop timer", menu)
-
- menu.addAction(action_show_settings)
- menu.addAction(action_stop_timer)
-
- action_show_settings.triggered.connect(self.show_settings)
- action_stop_timer.triggered.connect(self.stop_timer)
-
- self.action_stop_timer = action_stop_timer
-
- self.set_menu_visibility()
-
- parent_menu.addMenu(menu)
-
- def show_settings(self):
- self.widget_settings.input_api_key.setText(
- self.clockify_api.get_api_key()
- )
- self.widget_settings.show()
-
- def set_menu_visibility(self):
- self.action_stop_timer.setVisible(self.bool_timer_run)
-
- # --- TimersManager connection methods ---
- def register_timers_manager(self, timer_manager_addon):
- """Store TimersManager for future use."""
- self._timer_manager_addon = timer_manager_addon
-
- def timer_started(self, data):
- """Tell TimersManager that timer started."""
- if self._timer_manager_addon is not None:
- self._timer_manager_addon.timer_started(self.id, data)
-
- def timer_stopped(self):
- """Tell TimersManager that timer stopped."""
- if self._timer_manager_addon is not None:
- self._timer_manager_addon.timer_stopped(self.id)
-
- def stop_timer(self):
- """Called from TimersManager to stop timer."""
- self.clockify_api.finish_time_entry()
-
- def _verify_project_exists(self, project_name):
- project_id = self.clockify_api.get_project_id(project_name)
- if not project_id:
- self.log.warning(
- 'Project "{}" was not found in Clockify. Timer won\'t start.'
- ).format(project_name)
-
- if not self.MessageWidgetClass:
- return
-
- msg = (
- 'Project "{}" is not'
- ' in Clockify Workspace "{}".'
-            "<br>Please inform your Project Manager."
- ).format(project_name, str(self.clockify_api.workspace_name))
-
- self.message_widget = self.MessageWidgetClass(
- msg, "Clockify - Info Message"
- )
- self.message_widget.closed.connect(self.on_message_widget_close)
- self.message_widget.show()
- return False
- return project_id
-
- def start_timer(self, input_data):
- """Called from TimersManager to start timer."""
- # If not api key is not entered then skip
- if not self.clockify_api.get_api_key():
- return
-
- project_name = input_data.get("project_name")
- folder_path = input_data.get("folder_path")
- task_name = input_data.get("task_name")
- task_type = input_data.get("task_type")
- if not all((project_name, folder_path, task_name, task_type)):
- return
-
- # Concatenate hierarchy and task to get description
- description = "/".join([folder_path.lstrip("/"), task_name])
-
- # Check project existence
- project_id = self._verify_project_exists(project_name)
- if not project_id:
- return
-
- # Setup timer tags
- if not task_type:
- self.log.info("No tag information found for the timer")
-
- tag_ids = []
- task_tag_id = self.clockify_api.get_tag_id(task_type)
- if task_tag_id is not None:
- tag_ids.append(task_tag_id)
-
- # Start timer
- self.clockify_api.start_time_entry(
- description,
- project_id,
- tag_ids=tag_ids,
- workspace_id=self.clockify_api.workspace_id,
- user_id=self.clockify_api.user_id,
- )
diff --git a/server_addon/clockify/client/ayon_clockify/clockify_api.py b/server_addon/clockify/client/ayon_clockify/clockify_api.py
deleted file mode 100644
index 38ca6cdb66..0000000000
--- a/server_addon/clockify/client/ayon_clockify/clockify_api.py
+++ /dev/null
@@ -1,447 +0,0 @@
-import os
-import json
-import datetime
-
-import requests
-
-from ayon_core.lib.local_settings import AYONSecureRegistry
-from ayon_core.lib import Logger
-
-from .constants import (
- CLOCKIFY_ENDPOINT,
- ADMIN_PERMISSION_NAMES,
-)
-
-
-class ClockifyAPI:
- log = Logger.get_logger(__name__)
-
- def __init__(self, api_key=None, master_parent=None):
- self.workspace_name = None
- self.master_parent = master_parent
- self.api_key = api_key
- self._workspace_id = None
- self._user_id = None
- self._secure_registry = None
-
- @property
- def secure_registry(self):
- if self._secure_registry is None:
- self._secure_registry = AYONSecureRegistry("clockify")
- return self._secure_registry
-
- @property
- def headers(self):
- return {"x-api-key": self.api_key}
-
- @property
- def workspace_id(self):
- return self._workspace_id
-
- @property
- def user_id(self):
- return self._user_id
-
- def verify_api(self):
- for key, value in self.headers.items():
- if value is None or value.strip() == "":
- return False
- return True
-
- def set_api(self, api_key=None):
- if api_key is None:
- api_key = self.get_api_key()
-
- if api_key is not None and self.validate_api_key(api_key) is True:
- self.api_key = api_key
- self.set_workspace()
- self.set_user_id()
- if self.master_parent:
- self.master_parent.signed_in()
- return True
- return False
-
- def validate_api_key(self, api_key):
- test_headers = {"x-api-key": api_key}
- action_url = "user"
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=test_headers
- )
- if response.status_code != 200:
- return False
- return True
-
- def validate_workspace_permissions(self, workspace_id=None, user_id=None):
- if user_id is None:
- self.log.info("No user_id found during validation")
- return False
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = f"workspaces/{workspace_id}/users?includeRoles=1"
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- data = response.json()
- for user in data:
- if user.get("id") == user_id:
- roles_data = user.get("roles")
- for entities in roles_data:
- if entities.get("role") in ADMIN_PERMISSION_NAMES:
- return True
- return False
-
- def get_user_id(self):
- action_url = "user"
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- result = response.json()
- user_id = result.get("id", None)
-
- return user_id
-
- def set_workspace(self, name=None):
- if name is None:
- name = os.environ.get("CLOCKIFY_WORKSPACE", None)
- self.workspace_name = name
- if self.workspace_name is None:
- return
- try:
- result = self.validate_workspace()
- except Exception:
- result = False
- if result is not False:
- self._workspace_id = result
- if self.master_parent is not None:
- self.master_parent.start_timer_check()
- return True
- return False
-
- def validate_workspace(self, name=None):
- if name is None:
- name = self.workspace_name
- all_workspaces = self.get_workspaces()
- if name in all_workspaces:
- return all_workspaces[name]
- return False
-
- def set_user_id(self):
- try:
- user_id = self.get_user_id()
- except Exception:
- user_id = None
- if user_id is not None:
- self._user_id = user_id
-
- def get_api_key(self):
- return self.secure_registry.get_item("api_key", None)
-
- def save_api_key(self, api_key):
- self.secure_registry.set_item("api_key", api_key)
-
- def get_workspaces(self):
- action_url = "workspaces/"
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- return {
- workspace["name"]: workspace["id"] for workspace in response.json()
- }
-
- def get_projects(self, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = f"workspaces/{workspace_id}/projects"
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- if response.status_code != 403:
- result = response.json()
- return {project["name"]: project["id"] for project in result}
-
- def get_project_by_id(self, project_id, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/projects/{}".format(
- workspace_id, project_id
- )
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
-
- return response.json()
-
- def get_tags(self, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/tags".format(workspace_id)
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
-
- return {tag["name"]: tag["id"] for tag in response.json()}
-
- def get_tasks(self, project_id, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/projects/{}/tasks".format(
- workspace_id, project_id
- )
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
-
- return {task["name"]: task["id"] for task in response.json()}
-
- def get_workspace_id(self, workspace_name):
- all_workspaces = self.get_workspaces()
- if workspace_name not in all_workspaces:
- return None
- return all_workspaces[workspace_name]
-
- def get_project_id(self, project_name, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- all_projects = self.get_projects(workspace_id)
- if project_name not in all_projects:
- return None
- return all_projects[project_name]
-
- def get_tag_id(self, tag_name, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- all_tasks = self.get_tags(workspace_id)
- if tag_name not in all_tasks:
- return None
- return all_tasks[tag_name]
-
- def get_task_id(self, task_name, project_id, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- all_tasks = self.get_tasks(project_id, workspace_id)
- if task_name not in all_tasks:
- return None
- return all_tasks[task_name]
-
- def get_current_time(self):
- return str(datetime.datetime.utcnow().isoformat()) + "Z"
-
- def start_time_entry(
- self,
- description,
- project_id,
- task_id=None,
- tag_ids=None,
- workspace_id=None,
- user_id=None,
- billable=True,
- ):
- # Workspace
- if workspace_id is None:
- workspace_id = self.workspace_id
- # User ID
- if user_id is None:
- user_id = self._user_id
-
- # get running timer to check if we need to start it
- current_timer = self.get_in_progress()
-
- # Check if is currently run another times and has same values
- # DO not restart the timer, if it is already running for current task
- if current_timer:
- current_timer_hierarchy = current_timer.get("description")
- current_project_id = current_timer.get("projectId")
- current_task_id = current_timer.get("taskId")
- if (
- description == current_timer_hierarchy
- and project_id == current_project_id
- and task_id == current_task_id
- ):
- self.log.info(
- "Timer for the current project is already running"
- )
- self.bool_timer_run = True
- return self.bool_timer_run
- self.finish_time_entry()
-
- # Convert billable to strings
- if billable:
- billable = "true"
- else:
- billable = "false"
- # Rest API Action
- action_url = "workspaces/{}/user/{}/time-entries".format(
- workspace_id, user_id
- )
- start = self.get_current_time()
- body = {
- "start": start,
- "billable": billable,
- "description": description,
- "projectId": project_id,
- "taskId": task_id,
- "tagIds": tag_ids,
- }
- response = requests.post(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- if response.status_code < 300:
- return True
- return False
-
- def _get_current_timer_values(self, response):
- if response is None:
- return
- try:
- output = response.json()
- except json.decoder.JSONDecodeError:
- return None
- if output and isinstance(output, list):
- return output[0]
- return None
-
- def get_in_progress(self, user_id=None, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- if user_id is None:
- user_id = self.user_id
-
- action_url = (
- f"workspaces/{workspace_id}/user/"
- f"{user_id}/time-entries?in-progress=1"
- )
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- return self._get_current_timer_values(response)
-
- def finish_time_entry(self, workspace_id=None, user_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- if user_id is None:
- user_id = self.user_id
- current_timer = self.get_in_progress()
- if not current_timer:
- return
- action_url = "workspaces/{}/user/{}/time-entries".format(
- workspace_id, user_id
- )
- body = {"end": self.get_current_time()}
- response = requests.patch(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- return response.json()
-
- def get_time_entries(self, workspace_id=None, user_id=None, quantity=10):
- if workspace_id is None:
- workspace_id = self.workspace_id
- if user_id is None:
- user_id = self.user_id
- action_url = "workspaces/{}/user/{}/time-entries".format(
- workspace_id, user_id
- )
- response = requests.get(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- return response.json()[:quantity]
-
- def remove_time_entry(self, tid, workspace_id=None, user_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/user/{}/time-entries/{}".format(
- workspace_id, user_id, tid
- )
- response = requests.delete(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers
- )
- return response.json()
-
- def add_project(self, name, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/projects".format(workspace_id)
- body = {
- "name": name,
- "clientId": "",
- "isPublic": "false",
- "estimate": {"estimate": 0, "type": "AUTO"},
- "color": "#f44336",
- "billable": "true",
- }
- response = requests.post(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- return response.json()
-
- def add_workspace(self, name):
- action_url = "workspaces/"
- body = {"name": name}
- response = requests.post(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- return response.json()
-
- def add_task(self, name, project_id, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/projects/{}/tasks".format(
- workspace_id, project_id
- )
- body = {"name": name, "projectId": project_id}
- response = requests.post(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- return response.json()
-
- def add_tag(self, name, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "workspaces/{}/tags".format(workspace_id)
- body = {"name": name}
- response = requests.post(
- CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
- )
- return response.json()
-
- def delete_project(self, project_id, workspace_id=None):
- if workspace_id is None:
- workspace_id = self.workspace_id
- action_url = "/workspaces/{}/projects/{}".format(
- workspace_id, project_id
- )
- response = requests.delete(
- CLOCKIFY_ENDPOINT + action_url,
- headers=self.headers,
- )
- return response.json()
-
- def convert_input(
- self, entity_id, entity_name, mode="Workspace", project_id=None
- ):
- if entity_id is None:
- error = False
- error_msg = 'Missing information "{}"'
- if mode.lower() == "workspace":
- if entity_id is None and entity_name is None:
- if self.workspace_id is not None:
- entity_id = self.workspace_id
- else:
- error = True
- else:
- entity_id = self.get_workspace_id(entity_name)
- else:
- if entity_id is None and entity_name is None:
- error = True
- elif mode.lower() == "project":
- entity_id = self.get_project_id(entity_name)
- elif mode.lower() == "task":
- entity_id = self.get_task_id(
- task_name=entity_name, project_id=project_id
- )
- else:
- raise TypeError("Unknown type")
- # Raise error
- if error:
- raise ValueError(error_msg.format(mode))
-
- return entity_id
diff --git a/server_addon/clockify/client/ayon_clockify/constants.py b/server_addon/clockify/client/ayon_clockify/constants.py
deleted file mode 100644
index 4574f91be1..0000000000
--- a/server_addon/clockify/client/ayon_clockify/constants.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import os
-
-
-CLOCKIFY_FTRACK_SERVER_PATH = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), "ftrack", "server"
-)
-CLOCKIFY_FTRACK_USER_PATH = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), "ftrack", "user"
-)
-
-ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
-CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"
diff --git a/server_addon/clockify/client/ayon_clockify/ftrack/server/action_clockify_sync_server.py b/server_addon/clockify/client/ayon_clockify/ftrack/server/action_clockify_sync_server.py
deleted file mode 100644
index ed83fed287..0000000000
--- a/server_addon/clockify/client/ayon_clockify/ftrack/server/action_clockify_sync_server.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import os
-import json
-
-from ayon_clockify.clockify_api import ClockifyAPI
-
-from ayon_ftrack.lib import ServerAction
-
-
-class SyncClockifyServer(ServerAction):
- '''Synchronise project names and task types.'''
-
- identifier = "clockify.sync.server"
- label = "Sync To Clockify (server)"
- description = "Synchronise data to Clockify workspace"
-
- role_list = ["Administrator", "project Manager"]
-
- def __init__(self, *args, **kwargs):
- super(SyncClockifyServer, self).__init__(*args, **kwargs)
-
- workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
- api_key = os.environ.get("CLOCKIFY_API_KEY")
- self.clockify_api = ClockifyAPI(api_key)
- self.clockify_api.set_workspace(workspace_name)
- if api_key is None:
- modified_key = "None"
- else:
- str_len = int(len(api_key) / 2)
- start_replace = int(len(api_key) / 4)
- modified_key = ""
- for idx in range(len(api_key)):
- if idx >= start_replace and idx < start_replace + str_len:
- replacement = "X"
- else:
- replacement = api_key[idx]
- modified_key += replacement
-
- self.log.info(
- "Clockify info. Workspace: \"{}\" API key: \"{}\"".format(
- str(workspace_name), str(modified_key)
- )
- )
-
- def discover(self, session, entities, event):
- if (
- len(entities) != 1
- or entities[0].entity_type.lower() != "project"
- ):
- return False
- return True
-
- def launch(self, session, entities, event):
- self.clockify_api.set_api()
- if self.clockify_api.workspace_id is None:
- return {
- "success": False,
- "message": "Clockify Workspace or API key are not set!"
- }
-
- if not self.clockify_api.validate_workspace_permissions(
- self.clockify_api.workspace_id, self.clockify_api.user_id
- ):
- return {
- "success": False,
- "message": "Missing permissions for this action!"
- }
-
- # JOB SETTINGS
- user_id = event["source"]["user"]["id"]
- user = session.query("User where id is " + user_id).one()
-
- job = session.create("Job", {
- "user": user,
- "status": "running",
- "data": json.dumps({"description": "Sync Ftrack to Clockify"})
- })
- session.commit()
-
- project_entity = entities[0]
- if project_entity.entity_type.lower() != "project":
- project_entity = self.get_project_from_entity(project_entity)
-
- project_name = project_entity["full_name"]
- self.log.info(
- "Synchronization of project \"{}\" to clockify begins.".format(
- project_name
- )
- )
- task_types = (
- project_entity["project_schema"]["_task_type_schema"]["types"]
- )
- task_type_names = [
- task_type["name"] for task_type in task_types
- ]
- try:
- clockify_projects = self.clockify_api.get_projects()
- if project_name not in clockify_projects:
- response = self.clockify_api.add_project(project_name)
- if "id" not in response:
- self.log.warning(
- "Project \"{}\" can't be created. Response: {}".format(
- project_name, response
- )
- )
- return {
- "success": False,
- "message": (
- "Can't create clockify project \"{}\"."
- " Unexpected error."
- ).format(project_name)
- }
-
- clockify_workspace_tags = self.clockify_api.get_tags()
- for task_type_name in task_type_names:
- if task_type_name in clockify_workspace_tags:
- self.log.debug(
- "Task \"{}\" already exist".format(task_type_name)
- )
- continue
-
- response = self.clockify_api.add_tag(task_type_name)
- if "id" not in response:
- self.log.warning(
- "Task \"{}\" can't be created. Response: {}".format(
- task_type_name, response
- )
- )
-
- job["status"] = "done"
-
- except Exception:
- self.log.warning(
- "Synchronization to clockify failed.",
- exc_info=True
- )
-
- finally:
- if job["status"] != "done":
- job["status"] = "failed"
- session.commit()
-
- return True
-
-
-def register(session, **kw):
- SyncClockifyServer(session).register()
diff --git a/server_addon/clockify/client/ayon_clockify/ftrack/user/action_clockify_sync_local.py b/server_addon/clockify/client/ayon_clockify/ftrack/user/action_clockify_sync_local.py
deleted file mode 100644
index 05a94e56fd..0000000000
--- a/server_addon/clockify/client/ayon_clockify/ftrack/user/action_clockify_sync_local.py
+++ /dev/null
@@ -1,123 +0,0 @@
-import json
-from ayon_clockify.clockify_api import ClockifyAPI
-from ayon_ftrack.lib import BaseAction, statics_icon
-
-
-class SyncClockifyLocal(BaseAction):
- """Synchronise project names and task types."""
-
- identifier = "clockify.sync.local"
- label = "Sync To Clockify"
- description = "Synchronise data to Clockify workspace"
- role_list = ["Administrator", "project Manager"]
- icon = statics_icon("app_icons", "clockify-white.png")
-
- def __init__(self, *args, **kwargs):
- super(SyncClockifyLocal, self).__init__(*args, **kwargs)
-
- self.clockify_api = ClockifyAPI()
-
- def discover(self, session, entities, event):
- if (
- len(entities) == 1
- and entities[0].entity_type.lower() == "project"
- ):
- return True
- return False
-
- def launch(self, session, entities, event):
- self.clockify_api.set_api()
- if self.clockify_api.workspace_id is None:
- return {
- "success": False,
- "message": "Clockify Workspace or API key are not set!"
- }
-
- if (
- self.clockify_api.validate_workspace_permissions(
- self.clockify_api.workspace_id, self.clockify_api.user_id)
- is False
- ):
- return {
- "success": False,
- "message": "Missing permissions for this action!"
- }
-
- # JOB SETTINGS
- userId = event['source']['user']['id']
- user = session.query('User where id is ' + userId).one()
-
- job = session.create('Job', {
- 'user': user,
- 'status': 'running',
- 'data': json.dumps({
- 'description': 'Sync ftrack to Clockify'
- })
- })
- session.commit()
-
- project_entity = entities[0]
- if project_entity.entity_type.lower() != "project":
- project_entity = self.get_project_from_entity(project_entity)
-
- project_name = project_entity["full_name"]
- self.log.info(
- "Synchronization of project \"{}\" to clockify begins.".format(
- project_name
- )
- )
- task_types = (
- project_entity["project_schema"]["_task_type_schema"]["types"]
- )
- task_type_names = [
- task_type["name"] for task_type in task_types
- ]
- try:
- clockify_projects = self.clockify_api.get_projects()
- if project_name not in clockify_projects:
- response = self.clockify_api.add_project(project_name)
- if "id" not in response:
- self.log.warning(
- "Project \"{}\" can't be created. Response: {}".format(
- project_name, response
- )
- )
- return {
- "success": False,
- "message": (
- "Can't create clockify project \"{}\"."
- " Unexpected error."
- ).format(project_name)
- }
-
- clockify_workspace_tags = self.clockify_api.get_tags()
- for task_type_name in task_type_names:
- if task_type_name in clockify_workspace_tags:
- self.log.debug(
- "Task \"{}\" already exist".format(task_type_name)
- )
- continue
-
- response = self.clockify_api.add_tag(task_type_name)
- if "id" not in response:
- self.log.warning(
- "Task \"{}\" can't be created. Response: {}".format(
- task_type_name, response
- )
- )
-
- job["status"] = "done"
-
- except Exception:
- pass
-
- finally:
- if job["status"] != "done":
- job["status"] = "failed"
- session.commit()
-
- return True
-
-
-def register(session, **kw):
- SyncClockifyLocal(session).register()
diff --git a/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifyStart.py b/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifyStart.py
deleted file mode 100644
index d69d0371c0..0000000000
--- a/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifyStart.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import ayon_api
-
-from ayon_clockify.clockify_api import ClockifyAPI
-
-from ayon_core.pipeline import LauncherAction
-
-
-class ClockifyStart(LauncherAction):
- name = "clockify_start_timer"
- label = "Clockify - Start Timer"
- icon = "app_icons/clockify.png"
- order = 500
- clockify_api = ClockifyAPI()
-
- def is_compatible(self, selection):
- """Return whether the action is compatible with the session"""
- return selection.is_task_selected
-
- def process(self, selection, **kwargs):
- self.clockify_api.set_api()
- user_id = self.clockify_api.user_id
- workspace_id = self.clockify_api.workspace_id
- project_name = selection.project_name
- folder_path = selection.folder_path
- task_name = selection.task_name
- description = "/".join([folder_path.lstrip("/"), task_name])
-
- # fetch folder entity
- folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
- task_entity = ayon_api.get_task_by_name(
- project_name, folder_entity["id"], task_name
- )
-
- # get task type to fill the timer tag
- task_type = task_entity["taskType"]
-
- project_id = self.clockify_api.get_project_id(
- project_name, workspace_id
- )
- tag_ids = []
- tag_name = task_type
- tag_ids.append(self.clockify_api.get_tag_id(tag_name, workspace_id))
- self.clockify_api.start_time_entry(
- description,
- project_id,
- tag_ids=tag_ids,
- workspace_id=workspace_id,
- user_id=user_id,
- )
diff --git a/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifySync.py b/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifySync.py
deleted file mode 100644
index a32f2a8082..0000000000
--- a/server_addon/clockify/client/ayon_clockify/launcher_actions/ClockifySync.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import ayon_api
-
-from ayon_clockify.clockify_api import ClockifyAPI
-from ayon_core.pipeline import LauncherAction
-
-
-class ClockifyPermissionsCheckFailed(Exception):
- """Timer start failed due to user permissions check.
- Message should be self explanatory as traceback won't be shown.
- """
-
- pass
-
-
-class ClockifySync(LauncherAction):
- name = "sync_to_clockify"
- label = "Sync to Clockify"
- icon = "app_icons/clockify-white.png"
- order = 500
- clockify_api = ClockifyAPI()
-
- def is_compatible(self, selection):
- """Check if there's some projects to sync"""
- if selection.is_project_selected:
- return True
-
- try:
- next(ayon_api.get_projects())
- return True
- except StopIteration:
- return False
-
- def process(self, selection, **kwargs):
- self.clockify_api.set_api()
- workspace_id = self.clockify_api.workspace_id
- user_id = self.clockify_api.user_id
- if not self.clockify_api.validate_workspace_permissions(
- workspace_id, user_id
- ):
- raise ClockifyPermissionsCheckFailed(
- "Current CLockify user is missing permissions for this action!"
- )
-
- if selection.is_project_selected:
- projects_to_sync = [selection.project_entity]
- else:
- projects_to_sync = ayon_api.get_projects()
-
- projects_info = {
- project["name"]: {
- task_type["name"]
- for task_type in project["taskTypes"]
- }
- for project in projects_to_sync
- }
-
- clockify_projects = self.clockify_api.get_projects(workspace_id)
- for project_name, task_types in projects_info.items():
- if project_name in clockify_projects:
- continue
-
- response = self.clockify_api.add_project(
- project_name, workspace_id
- )
- if "id" not in response:
- self.log.error(
- "Project {} can't be created".format(project_name)
- )
- continue
-
- clockify_workspace_tags = self.clockify_api.get_tags(workspace_id)
- for task_type in task_types:
- if task_type not in clockify_workspace_tags:
- response = self.clockify_api.add_tag(
- task_type, workspace_id
- )
- if "id" not in response:
- self.log.error(
- "Task {} can't be created".format(task_type)
- )
- continue
diff --git a/server_addon/clockify/client/ayon_clockify/version.py b/server_addon/clockify/client/ayon_clockify/version.py
deleted file mode 100644
index 36bfd79364..0000000000
--- a/server_addon/clockify/client/ayon_clockify/version.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Package declaring AYON addon 'clockify' version."""
-__version__ = "0.2.1"
diff --git a/server_addon/clockify/client/ayon_clockify/widgets.py b/server_addon/clockify/client/ayon_clockify/widgets.py
deleted file mode 100644
index e64b64601d..0000000000
--- a/server_addon/clockify/client/ayon_clockify/widgets.py
+++ /dev/null
@@ -1,207 +0,0 @@
-from qtpy import QtCore, QtGui, QtWidgets
-from ayon_core import resources, style
-
-
-class MessageWidget(QtWidgets.QWidget):
-
- SIZE_W = 300
- SIZE_H = 130
-
- closed = QtCore.Signal()
-
- def __init__(self, messages, title):
- super(MessageWidget, self).__init__()
-
- # Icon
- icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
- self.setWindowIcon(icon)
-
- self.setWindowFlags(
- QtCore.Qt.WindowCloseButtonHint |
- QtCore.Qt.WindowMinimizeButtonHint
- )
-
- # Size setting
- self.resize(self.SIZE_W, self.SIZE_H)
- self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
- self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
-
- # Style
- self.setStyleSheet(style.load_stylesheet())
-
- self.setLayout(self._ui_layout(messages))
- self.setWindowTitle(title)
-
- def _ui_layout(self, messages):
- if not messages:
- messages = ["*Missing messages (This is a bug)*", ]
-
- elif not isinstance(messages, (tuple, list)):
- messages = [messages, ]
-
- main_layout = QtWidgets.QVBoxLayout(self)
-
- labels = []
- for message in messages:
- label = QtWidgets.QLabel(message)
- label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
- label.setTextFormat(QtCore.Qt.RichText)
- label.setWordWrap(True)
-
- labels.append(label)
- main_layout.addWidget(label)
-
- btn_close = QtWidgets.QPushButton("Close")
- btn_close.setToolTip('Close this window')
- btn_close.clicked.connect(self.on_close_clicked)
-
- btn_group = QtWidgets.QHBoxLayout()
- btn_group.addStretch(1)
- btn_group.addWidget(btn_close)
-
- main_layout.addLayout(btn_group)
-
- self.labels = labels
- self.btn_group = btn_group
- self.btn_close = btn_close
- self.main_layout = main_layout
-
- return main_layout
-
- def on_close_clicked(self):
- self.close()
-
- def close(self, *args, **kwargs):
- self.closed.emit()
- super(MessageWidget, self).close(*args, **kwargs)
-
-
-class ClockifySettings(QtWidgets.QWidget):
- SIZE_W = 500
- SIZE_H = 130
-
- loginSignal = QtCore.Signal(object, object, object)
-
- def __init__(self, clockify_api, optional=True):
- super(ClockifySettings, self).__init__()
-
- self.clockify_api = clockify_api
- self.optional = optional
- self.validated = False
-
- # Icon
- icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
- self.setWindowIcon(icon)
-
- self.setWindowTitle("Clockify settings")
- self.setWindowFlags(
- QtCore.Qt.WindowCloseButtonHint |
- QtCore.Qt.WindowMinimizeButtonHint
- )
-
- # Size setting
- self.resize(self.SIZE_W, self.SIZE_H)
- self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
- self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
- self.setStyleSheet(style.load_stylesheet())
-
- self._ui_init()
-
- def _ui_init(self):
- label_api_key = QtWidgets.QLabel("Clockify API key:")
-
- input_api_key = QtWidgets.QLineEdit()
- input_api_key.setFrame(True)
- input_api_key.setPlaceholderText("e.g. XX1XxXX2x3x4xXxx")
-
- error_label = QtWidgets.QLabel("")
- error_label.setTextFormat(QtCore.Qt.RichText)
- error_label.setWordWrap(True)
- error_label.hide()
-
- form_layout = QtWidgets.QFormLayout()
- form_layout.setContentsMargins(10, 15, 10, 5)
- form_layout.addRow(label_api_key, input_api_key)
- form_layout.addRow(error_label)
-
- btn_ok = QtWidgets.QPushButton("Ok")
- btn_ok.setToolTip('Sets Clockify API Key so can Start/Stop timer')
-
- btn_cancel = QtWidgets.QPushButton("Cancel")
- cancel_tooltip = 'Application won\'t start'
- if self.optional:
- cancel_tooltip = 'Close this window'
- btn_cancel.setToolTip(cancel_tooltip)
-
- btn_group = QtWidgets.QHBoxLayout()
- btn_group.addStretch(1)
- btn_group.addWidget(btn_ok)
- btn_group.addWidget(btn_cancel)
-
- main_layout = QtWidgets.QVBoxLayout(self)
- main_layout.addLayout(form_layout)
- main_layout.addLayout(btn_group)
-
- btn_ok.clicked.connect(self.click_ok)
- btn_cancel.clicked.connect(self._close_widget)
-
- self.label_api_key = label_api_key
- self.input_api_key = input_api_key
- self.error_label = error_label
-
- self.btn_ok = btn_ok
- self.btn_cancel = btn_cancel
-
- def setError(self, msg):
- self.error_label.setText(msg)
- self.error_label.show()
-
- def invalid_input(self, entity):
- entity.setStyleSheet("border: 1px solid red;")
-
- def click_ok(self):
- api_key = self.input_api_key.text().strip()
- if self.optional is True and api_key == '':
- self.clockify_api.save_api_key(None)
- self.clockify_api.set_api(api_key)
- self.validated = False
- self._close_widget()
- return
-
- validation = self.clockify_api.validate_api_key(api_key)
-
- if validation:
- self.clockify_api.save_api_key(api_key)
- self.clockify_api.set_api(api_key)
- self.validated = True
- self._close_widget()
- else:
- self.invalid_input(self.input_api_key)
- self.validated = False
- self.setError(
- "Entered invalid API key"
- )
-
- def showEvent(self, event):
- super(ClockifySettings, self).showEvent(event)
-
- # Make btns same width
- max_width = max(
- self.btn_ok.sizeHint().width(),
- self.btn_cancel.sizeHint().width()
- )
- self.btn_ok.setMinimumWidth(max_width)
- self.btn_cancel.setMinimumWidth(max_width)
-
- def closeEvent(self, event):
- if self.optional is True:
- event.ignore()
- self._close_widget()
- else:
- self.validated = False
-
- def _close_widget(self):
- if self.optional is True:
- self.hide()
- else:
- self.close()
diff --git a/server_addon/clockify/package.py b/server_addon/clockify/package.py
deleted file mode 100644
index 3245e61ca1..0000000000
--- a/server_addon/clockify/package.py
+++ /dev/null
@@ -1,9 +0,0 @@
-name = "clockify"
-title = "Clockify"
-version = "0.2.1"
-client_dir = "ayon_clockify"
-
-ayon_required_addons = {
- "core": ">0.3.2",
-}
-ayon_compatible_addons = {}
diff --git a/server_addon/clockify/server/__init__.py b/server_addon/clockify/server/__init__.py
deleted file mode 100644
index 11bbfed261..0000000000
--- a/server_addon/clockify/server/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from typing import Type
-
-from ayon_server.addons import BaseServerAddon
-
-from .settings import ClockifySettings
-
-
-class ClockifyAddon(BaseServerAddon):
- settings_model: Type[ClockifySettings] = ClockifySettings
diff --git a/server_addon/clockify/server/settings.py b/server_addon/clockify/server/settings.py
deleted file mode 100644
index c01d4c1545..0000000000
--- a/server_addon/clockify/server/settings.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from ayon_server.settings import BaseSettingsModel, SettingsField
-
-
-class ClockifySettings(BaseSettingsModel):
- workspace_name: str = SettingsField(
- "",
- title="Workspace name",
- scope=["studio"]
- )
diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py
index 85a93d49cd..1cf699db23 100644
--- a/server_addon/deadline/server/settings/publish_plugins.py
+++ b/server_addon/deadline/server/settings/publish_plugins.py
@@ -153,8 +153,8 @@ class FusionSubmitDeadlineModel(BaseSettingsModel):
)
group: str = SettingsField("", title="Group Name")
plugin: str = SettingsField("Fusion",
- enum_resolver=fusion_deadline_plugin_enum,
- title="Deadline Plugin")
+ enum_resolver=fusion_deadline_plugin_enum,
+ title="Deadline Plugin")
class NukeSubmitDeadlineModel(BaseSettingsModel):
@@ -375,11 +375,11 @@ class PublishPluginsModel(BaseSettingsModel):
title="Nuke Submit to deadline")
ProcessSubmittedCacheJobOnFarm: ProcessCacheJobFarmModel = SettingsField(
default_factory=ProcessCacheJobFarmModel,
- title="Process submitted cache Job on farm.",
- section="Publish Jobs")
+ title="Process submitted cache Job on farm",
+ section="Publish Jobs")
ProcessSubmittedJobOnFarm: ProcessSubmittedJobOnFarmModel = SettingsField(
default_factory=ProcessSubmittedJobOnFarmModel,
- title="Process submitted job on farm.")
+ title="Process submitted job on farm")
DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
diff --git a/server_addon/flame/client/ayon_flame/__init__.py b/server_addon/flame/client/ayon_flame/__init__.py
deleted file mode 100644
index d2d89bdb01..0000000000
--- a/server_addon/flame/client/ayon_flame/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from .version import __version__
-from .addon import (
- FLAME_ADDON_ROOT,
- FlameAddon,
-)
-
-
-__all__ = (
- "__version__",
-
- "FLAME_ADDON_ROOT",
- "FlameAddon",
-)
diff --git a/server_addon/flame/client/ayon_flame/addon.py b/server_addon/flame/client/ayon_flame/addon.py
deleted file mode 100644
index 5a96a9332e..0000000000
--- a/server_addon/flame/client/ayon_flame/addon.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-from ayon_core.addon import AYONAddon, IHostAddon
-
-from .version import __version__
-
-FLAME_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
-
-
-class FlameAddon(AYONAddon, IHostAddon):
- name = "flame"
- version = __version__
- host_name = "flame"
-
- def add_implementation_envs(self, env, _app):
- # Add requirements to DL_PYTHON_HOOK_PATH
- env["DL_PYTHON_HOOK_PATH"] = os.path.join(FLAME_ADDON_ROOT, "startup")
- env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
-
- # Set default values if are not already set via settings
- defaults = {
- "LOGLEVEL": "DEBUG"
- }
- for key, value in defaults.items():
- if not env.get(key):
- env[key] = value
-
- def get_launch_hook_paths(self, app):
- if app.host_name != self.host_name:
- return []
- return [
- os.path.join(FLAME_ADDON_ROOT, "hooks")
- ]
-
- def get_workfile_extensions(self):
- return [".otoc"]
diff --git a/server_addon/flame/client/ayon_flame/api/__init__.py b/server_addon/flame/client/ayon_flame/api/__init__.py
deleted file mode 100644
index 8fcf0c92b0..0000000000
--- a/server_addon/flame/client/ayon_flame/api/__init__.py
+++ /dev/null
@@ -1,159 +0,0 @@
-"""
-AYON Autodesk Flame api
-"""
-from .constants import (
- COLOR_MAP,
- MARKER_NAME,
- MARKER_COLOR,
- MARKER_DURATION,
- MARKER_PUBLISH_DEFAULT
-)
-from .lib import (
- CTX,
- FlameAppFramework,
- get_current_project,
- get_current_sequence,
- create_segment_data_marker,
- get_segment_data_marker,
- set_segment_data_marker,
- set_publish_attribute,
- get_publish_attribute,
- get_sequence_segments,
- maintained_segment_selection,
- reset_segment_selection,
- get_segment_attributes,
- get_clips_in_reels,
- get_reformatted_filename,
- get_frame_from_filename,
- get_padding_from_filename,
- maintained_object_duplication,
- maintained_temp_file_path,
- get_clip_segment,
- get_batch_group_from_desktop,
- MediaInfoFile,
- TimeEffectMetadata
-)
-from .utils import (
- setup,
- get_flame_version,
- get_flame_install_root
-)
-from .pipeline import (
- install,
- uninstall,
- ls,
- containerise,
- update_container,
- remove_instance,
- list_instances,
- imprint,
- maintained_selection
-)
-from .menu import (
- FlameMenuProjectConnect,
- FlameMenuTimeline,
- FlameMenuUniversal
-)
-from .plugin import (
- Creator,
- PublishableClip,
- ClipLoader,
- OpenClipSolver
-)
-from .workio import (
- open_file,
- save_file,
- current_file,
- has_unsaved_changes,
- file_extensions,
- work_root
-)
-from .render_utils import (
- export_clip,
- get_preset_path_by_xml_name,
- modify_preset_file
-)
-from .batch_utils import (
- create_batch_group,
- create_batch_group_conent
-)
-
-__all__ = [
- # constants
- "COLOR_MAP",
- "MARKER_NAME",
- "MARKER_COLOR",
- "MARKER_DURATION",
- "MARKER_PUBLISH_DEFAULT",
-
- # lib
- "CTX",
- "FlameAppFramework",
- "get_current_project",
- "get_current_sequence",
- "create_segment_data_marker",
- "get_segment_data_marker",
- "set_segment_data_marker",
- "set_publish_attribute",
- "get_publish_attribute",
- "get_sequence_segments",
- "maintained_segment_selection",
- "reset_segment_selection",
- "get_segment_attributes",
- "get_clips_in_reels",
- "get_reformatted_filename",
- "get_frame_from_filename",
- "get_padding_from_filename",
- "maintained_object_duplication",
- "maintained_temp_file_path",
- "get_clip_segment",
- "get_batch_group_from_desktop",
- "MediaInfoFile",
- "TimeEffectMetadata",
-
- # pipeline
- "install",
- "uninstall",
- "ls",
- "containerise",
- "update_container",
- "reload_pipeline",
- "maintained_selection",
- "remove_instance",
- "list_instances",
- "imprint",
- "maintained_selection",
-
- # utils
- "setup",
- "get_flame_version",
- "get_flame_install_root",
-
- # menu
- "FlameMenuProjectConnect",
- "FlameMenuTimeline",
- "FlameMenuUniversal",
-
- # plugin
- "Creator",
- "PublishableClip",
- "ClipLoader",
- "OpenClipSolver",
-
- # workio
- "open_file",
- "save_file",
- "current_file",
- "has_unsaved_changes",
- "file_extensions",
- "work_root",
-
- # render utils
- "export_clip",
- "get_preset_path_by_xml_name",
- "modify_preset_file",
-
- # batch utils
- "create_batch_group",
- "create_batch_group_conent"
-]
diff --git a/server_addon/flame/client/ayon_flame/api/batch_utils.py b/server_addon/flame/client/ayon_flame/api/batch_utils.py
deleted file mode 100644
index 9d419a4a90..0000000000
--- a/server_addon/flame/client/ayon_flame/api/batch_utils.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import flame
-
-
-def create_batch_group(
- name,
- frame_start,
- frame_duration,
- update_batch_group=None,
- **kwargs
-):
- """Create Batch Group in active project's Desktop
-
- Args:
- name (str): name of batch group to be created
- frame_start (int): start frame of batch
- frame_end (int): end frame of batch
- update_batch_group (PyBatch)[optional]: batch group to update
-
- Return:
- PyBatch: active flame batch group
- """
- # make sure some batch obj is present
- batch_group = update_batch_group or flame.batch
-
- schematic_reels = kwargs.get("shematic_reels") or ['LoadedReel1']
- shelf_reels = kwargs.get("shelf_reels") or ['ShelfReel1']
-
- handle_start = kwargs.get("handleStart") or 0
- handle_end = kwargs.get("handleEnd") or 0
-
- frame_start -= handle_start
- frame_duration += handle_start + handle_end
-
- if not update_batch_group:
- # Create batch group with name, start_frame value, duration value,
- # set of schematic reel names, set of shelf reel names
- batch_group = batch_group.create_batch_group(
- name,
- start_frame=frame_start,
- duration=frame_duration,
- reels=schematic_reels,
- shelf_reels=shelf_reels
- )
- else:
- batch_group.name = name
- batch_group.start_frame = frame_start
- batch_group.duration = frame_duration
-
- # add reels to batch group
- _add_reels_to_batch_group(
- batch_group, schematic_reels, shelf_reels)
-
- # TODO: also update write node if there is any
- # TODO: also update loaders to start from correct frameStart
-
- if kwargs.get("switch_batch_tab"):
- # use this command to switch to the batch tab
- batch_group.go_to()
-
- return batch_group
-
-
-def _add_reels_to_batch_group(batch_group, reels, shelf_reels):
- # update or create defined reels
- # helper variables
- reel_names = [
- r.name.get_value()
- for r in batch_group.reels
- ]
- shelf_reel_names = [
- r.name.get_value()
- for r in batch_group.shelf_reels
- ]
- # add schematic reels
- for _r in reels:
- if _r in reel_names:
- continue
- batch_group.create_reel(_r)
-
- # add shelf reels
- for _sr in shelf_reels:
- if _sr in shelf_reel_names:
- continue
- batch_group.create_shelf_reel(_sr)
-
-
-def create_batch_group_conent(batch_nodes, batch_links, batch_group=None):
- """Creating batch group with links
-
- Args:
- batch_nodes (list of dict): each dict is node definition
- batch_links (list of dict): each dict is link definition
- batch_group (PyBatch, optional): batch group. Defaults to None.
-
- Return:
- dict: all batch nodes {name or id: PyNode}
- """
- # make sure some batch obj is present
- batch_group = batch_group or flame.batch
- all_batch_nodes = {
- b.name.get_value(): b
- for b in batch_group.nodes
- }
- for node in batch_nodes:
- # NOTE: node_props needs to be ideally OrederDict type
- node_id, node_type, node_props = (
- node["id"], node["type"], node["properties"])
-
- # get node name for checking if exists
- node_name = node_props.pop("name", None) or node_id
-
- if all_batch_nodes.get(node_name):
- # update existing batch node
- batch_node = all_batch_nodes[node_name]
- else:
- # create new batch node
- batch_node = batch_group.create_node(node_type)
-
- # set name
- batch_node.name.set_value(node_name)
-
- # set attributes found in node props
- for key, value in node_props.items():
- if not hasattr(batch_node, key):
- continue
- setattr(batch_node, key, value)
-
- # add created node for possible linking
- all_batch_nodes[node_id] = batch_node
-
- # link nodes to each other
- for link in batch_links:
- _from_n, _to_n = link["from_node"], link["to_node"]
-
- # check if all linking nodes are available
- if not all([
- all_batch_nodes.get(_from_n["id"]),
- all_batch_nodes.get(_to_n["id"])
- ]):
- continue
-
- # link nodes in defined link
- batch_group.connect_nodes(
- all_batch_nodes[_from_n["id"]], _from_n["connector"],
- all_batch_nodes[_to_n["id"]], _to_n["connector"]
- )
-
- # sort batch nodes
- batch_group.organize()
-
- return all_batch_nodes
diff --git a/server_addon/flame/client/ayon_flame/api/constants.py b/server_addon/flame/client/ayon_flame/api/constants.py
deleted file mode 100644
index 04191c539d..0000000000
--- a/server_addon/flame/client/ayon_flame/api/constants.py
+++ /dev/null
@@ -1,24 +0,0 @@
-
-"""
-AYON Flame api constances
-"""
-# AYON marker workflow variables
-MARKER_NAME = "OpenPypeData"
-MARKER_DURATION = 0
-MARKER_COLOR = "cyan"
-MARKER_PUBLISH_DEFAULT = False
-
-# AYON color definitions
-COLOR_MAP = {
- "red": (1.0, 0.0, 0.0),
- "orange": (1.0, 0.5, 0.0),
- "yellow": (1.0, 1.0, 0.0),
- "pink": (1.0, 0.5, 1.0),
- "white": (1.0, 1.0, 1.0),
- "green": (0.0, 1.0, 0.0),
- "cyan": (0.0, 1.0, 1.0),
- "blue": (0.0, 0.0, 1.0),
- "purple": (0.5, 0.0, 0.5),
- "magenta": (0.5, 0.0, 1.0),
- "black": (0.0, 0.0, 0.0)
-}
diff --git a/server_addon/flame/client/ayon_flame/api/lib.py b/server_addon/flame/client/ayon_flame/api/lib.py
deleted file mode 100644
index 8bfe6348ea..0000000000
--- a/server_addon/flame/client/ayon_flame/api/lib.py
+++ /dev/null
@@ -1,1272 +0,0 @@
-import sys
-import os
-import re
-import json
-import pickle
-import clique
-import tempfile
-import traceback
-import itertools
-import contextlib
-import xml.etree.cElementTree as cET
-from copy import deepcopy, copy
-from xml.etree import ElementTree as ET
-from pprint import pformat
-
-from ayon_core.lib import Logger, run_subprocess
-
-from .constants import (
- MARKER_COLOR,
- MARKER_DURATION,
- MARKER_NAME,
- COLOR_MAP,
- MARKER_PUBLISH_DEFAULT
-)
-
-log = Logger.get_logger(__name__)
-
-FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")
-
-
-class CTX:
- # singleton used for passing data between api modules
- app_framework = None
- flame_apps = []
- selection = None
-
-
-@contextlib.contextmanager
-def io_preferences_file(klass, filepath, write=False):
- try:
- flag = "w" if write else "r"
- yield open(filepath, flag)
-
- except IOError as _error:
- klass.log.info("Unable to work with preferences `{}`: {}".format(
- filepath, _error))
-
-
-class FlameAppFramework(object):
- # flameAppFramework class takes care of preferences
-
- class prefs_dict(dict):
-
- def __init__(self, master, name, **kwargs):
- self.name = name
- self.master = master
- if not self.master.get(self.name):
- self.master[self.name] = {}
- self.master[self.name].__init__()
-
- def __getitem__(self, k):
- return self.master[self.name].__getitem__(k)
-
- def __setitem__(self, k, v):
- return self.master[self.name].__setitem__(k, v)
-
- def __delitem__(self, k):
- return self.master[self.name].__delitem__(k)
-
- def get(self, k, default=None):
- return self.master[self.name].get(k, default)
-
- def setdefault(self, k, default=None):
- return self.master[self.name].setdefault(k, default)
-
- def pop(self, *args, **kwargs):
- return self.master[self.name].pop(*args, **kwargs)
-
- def update(self, mapping=(), **kwargs):
- self.master[self.name].update(mapping, **kwargs)
-
- def __contains__(self, k):
- return self.master[self.name].__contains__(k)
-
- def copy(self): # don"t delegate w/ super - dict.copy() -> dict :(
- return type(self)(self)
-
- def keys(self):
- return self.master[self.name].keys()
-
- @classmethod
- def fromkeys(cls, keys, v=None):
- return cls.master[cls.name].fromkeys(keys, v)
-
- def __repr__(self):
- return "{0}({1})".format(
- type(self).__name__, self.master[self.name].__repr__())
-
- def master_keys(self):
- return self.master.keys()
-
- def __init__(self):
- self.name = self.__class__.__name__
- self.bundle_name = "OpenPypeFlame"
- # self.prefs scope is limited to flame project and user
- self.prefs = {}
- self.prefs_user = {}
- self.prefs_global = {}
- self.log = log
-
- try:
- import flame
- self.flame = flame
- self.flame_project_name = self.flame.project.current_project.name
- self.flame_user_name = flame.users.current_user.name
- except Exception:
- self.flame = None
- self.flame_project_name = None
- self.flame_user_name = None
-
- import socket
- self.hostname = socket.gethostname()
-
- if sys.platform == "darwin":
- self.prefs_folder = os.path.join(
- os.path.expanduser("~"),
- "Library",
- "Caches",
- "OpenPype",
- self.bundle_name
- )
- elif sys.platform.startswith("linux"):
- self.prefs_folder = os.path.join(
- os.path.expanduser("~"),
- ".OpenPype",
- self.bundle_name)
-
- self.prefs_folder = os.path.join(
- self.prefs_folder,
- self.hostname,
- )
-
- self.log.info("[{}] waking up".format(self.__class__.__name__))
-
- try:
- self.load_prefs()
- except RuntimeError:
- self.save_prefs()
-
- # menu auto-refresh defaults
- if not self.prefs_global.get("menu_auto_refresh"):
- self.prefs_global["menu_auto_refresh"] = {
- "media_panel": True,
- "batch": True,
- "main_menu": True,
- "timeline_menu": True
- }
-
- self.apps = []
-
- def get_pref_file_paths(self):
-
- prefix = self.prefs_folder + os.path.sep + self.bundle_name
- prefs_file_path = "_".join([
- prefix, self.flame_user_name,
- self.flame_project_name]) + ".prefs"
- prefs_user_file_path = "_".join([
- prefix, self.flame_user_name]) + ".prefs"
- prefs_global_file_path = prefix + ".prefs"
-
- return (prefs_file_path, prefs_user_file_path, prefs_global_file_path)
-
- def load_prefs(self):
-
- (proj_pref_path, user_pref_path,
- glob_pref_path) = self.get_pref_file_paths()
-
- with io_preferences_file(self, proj_pref_path) as prefs_file:
- self.prefs = pickle.load(prefs_file)
- self.log.info(
- "Project - preferences contents:\n{}".format(
- pformat(self.prefs)
- ))
-
- with io_preferences_file(self, user_pref_path) as prefs_file:
- self.prefs_user = pickle.load(prefs_file)
- self.log.info(
- "User - preferences contents:\n{}".format(
- pformat(self.prefs_user)
- ))
-
- with io_preferences_file(self, glob_pref_path) as prefs_file:
- self.prefs_global = pickle.load(prefs_file)
- self.log.info(
- "Global - preferences contents:\n{}".format(
- pformat(self.prefs_global)
- ))
-
- return True
-
- def save_prefs(self):
- # make sure the preference folder is available
- if not os.path.isdir(self.prefs_folder):
- try:
- os.makedirs(self.prefs_folder)
- except Exception:
- self.log.info("Unable to create folder {}".format(
- self.prefs_folder))
- return False
-
- # get all pref file paths
- (proj_pref_path, user_pref_path,
- glob_pref_path) = self.get_pref_file_paths()
-
- with io_preferences_file(self, proj_pref_path, True) as prefs_file:
- pickle.dump(self.prefs, prefs_file)
- self.log.info(
- "Project - preferences contents:\n{}".format(
- pformat(self.prefs)
- ))
-
- with io_preferences_file(self, user_pref_path, True) as prefs_file:
- pickle.dump(self.prefs_user, prefs_file)
- self.log.info(
- "User - preferences contents:\n{}".format(
- pformat(self.prefs_user)
- ))
-
- with io_preferences_file(self, glob_pref_path, True) as prefs_file:
- pickle.dump(self.prefs_global, prefs_file)
- self.log.info(
- "Global - preferences contents:\n{}".format(
- pformat(self.prefs_global)
- ))
-
- return True
-
-
-def get_current_project():
- import flame
- return flame.project.current_project
-
-
-def get_current_sequence(selection):
- import flame
-
- def segment_to_sequence(_segment):
- track = _segment.parent
- version = track.parent
- return version.parent
-
- process_timeline = None
-
- if len(selection) == 1:
- if isinstance(selection[0], flame.PySequence):
- process_timeline = selection[0]
- if isinstance(selection[0], flame.PySegment):
- process_timeline = segment_to_sequence(selection[0])
- else:
- for segment in selection:
- if isinstance(segment, flame.PySegment):
- process_timeline = segment_to_sequence(segment)
- break
-
- return process_timeline
-
-
-def rescan_hooks():
- import flame
- try:
- flame.execute_shortcut("Rescan Python Hooks")
- except Exception:
- pass
-
-
-def get_metadata(project_name, _log=None):
- # TODO: can be replaced by MediaInfoFile class method
- from adsk.libwiretapPythonClientAPI import (
- WireTapClient,
- WireTapServerHandle,
- WireTapNodeHandle,
- WireTapStr
- )
-
- class GetProjectColorPolicy(object):
- def __init__(self, host_name=None, _log=None):
- # Create a connection to the Backburner manager using the Wiretap
- # python API.
- #
- self.log = _log or log
- self.host_name = host_name or "localhost"
- self._wiretap_client = WireTapClient()
- if not self._wiretap_client.init():
- raise Exception("Could not initialize Wiretap Client")
- self._server = WireTapServerHandle(
- "{}:IFFFS".format(self.host_name))
-
- def process(self, project_name):
- policy_node_handle = WireTapNodeHandle(
- self._server,
- "/projects/{}/syncolor/policy".format(project_name)
- )
- self.log.info(policy_node_handle)
-
- policy = WireTapStr()
- if not policy_node_handle.getNodeTypeStr(policy):
- self.log.warning(
- "Could not retrieve policy of '%s': %s" % (
- policy_node_handle.getNodeId().id(),
- policy_node_handle.lastError()
- )
- )
-
- return policy.c_str()
-
- policy_wiretap = GetProjectColorPolicy(_log=_log)
- return policy_wiretap.process(project_name)
-
-
-def get_segment_data_marker(segment, with_marker=None):
- """
- Get openpype track item tag created by creator or loader plugin.
-
- Attributes:
- segment (flame.PySegment): flame api object
- with_marker (bool)[optional]: if true it will return also marker object
-
- Returns:
- dict: openpype tag data
-
- Returns(with_marker=True):
- flame.PyMarker, dict
- """
- for marker in segment.markers:
- comment = marker.comment.get_value()
- color = marker.colour.get_value()
- name = marker.name.get_value()
-
- if (name == MARKER_NAME) and (
- color == COLOR_MAP[MARKER_COLOR]):
- if not with_marker:
- return json.loads(comment)
- else:
- return marker, json.loads(comment)
-
-
-def set_segment_data_marker(segment, data=None):
- """
- Set openpype track item tag to input segment.
-
- Attributes:
- segment (flame.PySegment): flame api object
-
- Returns:
- dict: json loaded data
- """
- data = data or dict()
-
- marker_data = get_segment_data_marker(segment, True)
-
- if marker_data:
- # get available openpype tag if any
- marker, tag_data = marker_data
- # update tag data with new data
- tag_data.update(data)
- # update marker with tag data
- marker.comment = json.dumps(tag_data)
- else:
- # update tag data with new data
- marker = create_segment_data_marker(segment)
- # add tag data to marker's comment
- marker.comment = json.dumps(data)
-
-
-def set_publish_attribute(segment, value):
- """ Set Publish attribute in input Tag object
-
- Attribute:
- segment (flame.PySegment)): flame api object
- value (bool): True or False
- """
- tag_data = get_segment_data_marker(segment)
- tag_data["publish"] = value
-
- # set data to the publish attribute
- set_segment_data_marker(segment, tag_data)
-
-
-def get_publish_attribute(segment):
- """ Get Publish attribute from input Tag object
-
- Attribute:
- segment (flame.PySegment)): flame api object
-
- Returns:
- bool: True or False
- """
- tag_data = get_segment_data_marker(segment)
-
- if not tag_data:
- set_publish_attribute(segment, MARKER_PUBLISH_DEFAULT)
- return MARKER_PUBLISH_DEFAULT
-
- return tag_data["publish"]
-
-
-def create_segment_data_marker(segment):
- """ Create openpype marker on a segment.
-
- Attributes:
- segment (flame.PySegment): flame api object
-
- Returns:
- flame.PyMarker: flame api object
- """
- # get duration of segment
- duration = segment.record_duration.relative_frame
- # calculate start frame of the new marker
- start_frame = int(segment.record_in.relative_frame) + int(duration / 2)
- # create marker
- marker = segment.create_marker(start_frame)
- # set marker name
- marker.name = MARKER_NAME
- # set duration
- marker.duration = MARKER_DURATION
- # set colour
- marker.colour = COLOR_MAP[MARKER_COLOR] # Red
-
- return marker
-
-
-def get_sequence_segments(sequence, selected=False):
- segments = []
- # loop versions in sequence
- for ver in sequence.versions:
- # loop track in versions
- for track in ver.tracks:
- # ignore all empty tracks and hidden too
- if len(track.segments) == 0 and track.hidden:
- continue
- # loop all segment in remaining tracks
- for segment in track.segments:
- if segment.name.get_value() == "":
- continue
- if segment.hidden.get_value() is True:
- continue
- if (
- selected is True
- and segment.selected.get_value() is not True
- ):
- continue
- # add it to original selection
- segments.append(segment)
- return segments
-
-
-@contextlib.contextmanager
-def maintained_segment_selection(sequence):
- """Maintain selection during context
-
- Attributes:
- sequence (flame.PySequence): python api object
-
- Yield:
- list of flame.PySegment
-
- Example:
- >>> with maintained_segment_selection(sequence) as selected_segments:
- ... for segment in selected_segments:
- ... segment.selected = False
- >>> print(segment.selected)
- True
- """
- selected_segments = get_sequence_segments(sequence, True)
- try:
- # do the operation on selected segments
- yield selected_segments
- finally:
- # reset all selected clips
- reset_segment_selection(sequence)
- # select only original selection of segments
- for segment in selected_segments:
- segment.selected = True
-
-
-def reset_segment_selection(sequence):
- """Deselect all selected nodes
- """
- for ver in sequence.versions:
- for track in ver.tracks:
- if len(track.segments) == 0 and track.hidden:
- continue
- for segment in track.segments:
- segment.selected = False
-
-
-def _get_shot_tokens_values(clip, tokens):
- old_value = None
- output = {}
-
- if not clip.shot_name:
- return output
-
- old_value = clip.shot_name.get_value()
-
- for token in tokens:
- clip.shot_name.set_value(token)
- _key = str(re.sub("[<>]", "", token)).replace(" ", "_")
-
- try:
- output[_key] = int(clip.shot_name.get_value())
- except ValueError:
- output[_key] = clip.shot_name.get_value()
-
- clip.shot_name.set_value(old_value)
-
- return output
-
-
-def get_segment_attributes(segment):
- if segment.name.get_value() == "":
- return None
-
- # Add timeline segment to tree
- clip_data = {
- "shot_name": segment.shot_name.get_value(),
- "segment_name": segment.name.get_value(),
- "segment_comment": segment.comment.get_value(),
- "tape_name": segment.tape_name,
- "source_name": segment.source_name,
- "fpath": segment.file_path,
- "PySegment": segment
- }
-
- # head and tail with forward compatibility
- if segment.head:
- # `infinite` can be also returned
- if isinstance(segment.head, str):
- clip_data["segment_head"] = 0
- else:
- clip_data["segment_head"] = int(segment.head)
- if segment.tail:
- # `infinite` can be also returned
- if isinstance(segment.tail, str):
- clip_data["segment_tail"] = 0
- else:
- clip_data["segment_tail"] = int(segment.tail)
-
- # add all available shot tokens
- shot_tokens = _get_shot_tokens_values(segment, [
- "", "", "", "", "",
- "