From b6c25f987c8aa591d1b79d18705e7da11ab49723 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 20 Jul 2021 16:02:23 +0200 Subject: [PATCH 01/77] separated collection from initialization of modules --- openpype/modules/base.py | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c7efbd5ab30..91fdd497241 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -352,21 +352,16 @@ def __init__(self, _system_settings=None): # For report of time consumption self._report = {} + self._raw_modules = None + self.initialize_modules() self.connect_modules() - def initialize_modules(self): - """Import and initialize modules.""" - self.log.debug("*** Pype modules initialization.") - # Prepare settings for modules - system_settings = getattr(self, "_system_settings", None) - if system_settings is None: - system_settings = get_system_settings() - modules_settings = system_settings["modules"] + def collect_modules(self): + if self._raw_modules is not None: + return - report = {} - time_start = time.time() - prev_start_time = time_start + self._raw_modules = [] # Go through globals in `pype.modules` for name in dir(openpype.modules): @@ -394,7 +389,27 @@ def initialize_modules(self): ).format(name, ", ".join(not_implemented))) continue + self._raw_modules.append(modules_item) + + def initialize_modules(self): + """Import and initialize modules.""" + self.collect_modules() + + self.log.debug("*** Pype modules initialization.") + # Prepare settings for modules + system_settings = getattr(self, "_system_settings", None) + if system_settings is None: + system_settings = get_system_settings() + modules_settings = system_settings["modules"] + + report = {} + time_start = time.time() + prev_start_time = time_start + + # Go through globals in `pype.modules` + for modules_item in self._raw_modules: try: + name = modules_item.__name__ # Try initialize module 
module = modules_item(self, modules_settings) # Store initialized object @@ -711,6 +726,9 @@ def __init__(self): self.modules_by_id = {} self.modules_by_name = {} self._report = {} + + self._raw_modules = None + self.tray_manager = None self.doubleclick_callbacks = {} From f622e32fcdf20477c68ab783ee5ecc7d376a17dd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:50:25 +0200 Subject: [PATCH 02/77] added base class of OpenPypeAddOn --- openpype/modules/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 91fdd497241..87d6c4cbbcd 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -86,6 +86,10 @@ def get_plugin_paths(self): pass +class OpenPypeAddOn(PypeModule): + pass + + @six.add_metaclass(ABCMeta) class ILaunchHookPaths: """Module has launch hook paths to return. From e6e7ee6867f401b0851f371e3ddd78e6e1fe3853 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:51:17 +0200 Subject: [PATCH 03/77] use callback directly --- openpype/modules/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 87d6c4cbbcd..dd144075e1e 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -149,12 +149,12 @@ def execute_in_main_thread(self, callback): Some callbacks need to be processed on main thread (menu actions must be added on main thread or they won't get triggered etc.) 
""" - # called without initialized tray, still main thread needed if not self.tray_initialized: + # TODO Called without initialized tray, still main thread needed try: - callback = self._main_thread_callbacks.popleft() callback() - except: + + except Exception: self.log.warning( "Failed to execute {} in main thread".format(callback), exc_info=True) From 26e8f9250636185730294580d0b7d5125929bfc4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:51:46 +0200 Subject: [PATCH 04/77] idea of modules and addons import --- openpype/modules/modules_import.py | 84 ++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 openpype/modules/modules_import.py diff --git a/openpype/modules/modules_import.py b/openpype/modules/modules_import.py new file mode 100644 index 00000000000..24441c6d20c --- /dev/null +++ b/openpype/modules/modules_import.py @@ -0,0 +1,84 @@ +import sys +import six + + +class __ModuleClass: + __attributes__ = {} + __defaults__ = set() + + def __getattr__(self, attr_name): + return self.__attributes__.get( + attr_name, + type("Missing.{}".format(attr_name), (), {}) + ) + + def __setattr__(self, attr_name, value): + self.__attributes__[attr_name] = value + + def keys(self): + return self.__attributes__.keys() + + def values(self): + return self.__attributes__.values() + + def items(self): + return self.__attributes__.items() + + +def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import imp + + dst_module = sys.modules[dst_module_name] + + fp, pathname, description = imp.find_module(module_name, [dirpath]) + module = imp.load_module(full_module_name, fp, pathname, description) + setattr(dst_module, module_name, module) + + return module + + +def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + full_module_name = 
"{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import importlib.util + from importlib._bootstrap_external import PathFinder + + dst_module = sys.modules[dst_module_name] + loader = PathFinder.find_module(full_module_name, [dirpath]) + + spec = importlib.util.spec_from_loader( + full_module_name, loader, origin=dirpath + ) + + module = importlib.util.module_from_spec(spec) + + if dst_module is not None: + setattr(dst_module, module_name, module) + + sys.modules[full_module_name] = module + + loader.exec_module(module) + + return module + + +def load_module_from_dirpath(dirpath, folder_name, dst_module_name): + if six.PY3: + module = _load_module_from_dirpath_py3( + dirpath, folder_name, dst_module_name + ) + else: + module = _load_module_from_dirpath_py2( + dirpath, folder_name, dst_module_name + ) + return module + + +sys.modules["openpype_modules"] = __ModuleClass() +sys.modules["openpype_interfaces"] = __ModuleClass() From 5ba787c274f43b6d91aac205ae97b8310224b88c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 18:08:18 +0200 Subject: [PATCH 05/77] defined OpenPypeInterface --- openpype/modules/__init__.py | 3 +++ openpype/modules/base.py | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index d6fb9c0aef9..3ac11950efe 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from .base import ( PypeModule, + OpenPypeInterface, ITrayModule, ITrayAction, ITrayService, @@ -44,6 +45,8 @@ __all__ = ( "PypeModule", + "OpenPypeInterface", + "ITrayModule", "ITrayAction", "ITrayService", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index dd144075e1e..373e9c9422a 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -14,6 +14,16 @@ from openpype import resources +@six.add_metaclass(ABCMeta) +class 
OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell that has implementation + for specific part or for other module/addon. + """ + pass + + @six.add_metaclass(ABCMeta) class PypeModule: """Base class of pype module. From f2b53133e0cafb223d35cc53866fcfde4c4815a0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 18:11:26 +0200 Subject: [PATCH 06/77] moved interfaces to interfaces --- openpype/modules/base.py | 261 -------------------------------- openpype/modules/interfaces.py | 267 +++++++++++++++++++++++++++++++++ 2 files changed, 267 insertions(+), 261 deletions(-) create mode 100644 openpype/modules/interfaces.py diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 373e9c9422a..6e1d19589cd 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -80,271 +80,10 @@ def get_global_environments(self): return {} -@six.add_metaclass(ABCMeta) -class IPluginPaths: - """Module has plugin paths to return. - - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string. - { - "publish": ["path/to/publish_plugins"] - } - """ - # TODO validation of an output - @abstractmethod - def get_plugin_paths(self): - pass - - class OpenPypeAddOn(PypeModule): pass -@six.add_metaclass(ABCMeta) -class ILaunchHookPaths: - """Module has launch hook paths to return. - - Expected result is list of paths. - ["path/to/launch_hooks_dir"] - """ - - @abstractmethod - def get_launch_hook_paths(self): - pass - - -@six.add_metaclass(ABCMeta) -class ITrayModule: - """Module has special procedures when used in Pype Tray. - - IMPORTANT: - The module still must be usable if is not used in tray even if - would do nothing. - """ - tray_initialized = False - _tray_manager = None - - @abstractmethod - def tray_init(self): - """Initialization part of tray implementation. 
- - Triggered between `initialization` and `connect_with_modules`. - - This is where GUIs should be loaded or tray specific parts should be - prepared. - """ - pass - - @abstractmethod - def tray_menu(self, tray_menu): - """Add module's action to tray menu.""" - pass - - @abstractmethod - def tray_start(self): - """Start procedure in Pype tray.""" - pass - - @abstractmethod - def tray_exit(self): - """Cleanup method which is executed on tray shutdown. - - This is place where all threads should be shut. - """ - pass - - def execute_in_main_thread(self, callback): - """ Pushes callback to the queue or process 'callback' on a main thread - - Some callbacks need to be processed on main thread (menu actions - must be added on main thread or they won't get triggered etc.) - """ - if not self.tray_initialized: - # TODO Called without initialized tray, still main thread needed - try: - callback() - - except Exception: - self.log.warning( - "Failed to execute {} in main thread".format(callback), - exc_info=True) - - return - self.manager.tray_manager.execute_in_main_thread(callback) - - def show_tray_message(self, title, message, icon=None, msecs=None): - """Show tray message. - - Args: - title (str): Title of message. - message (str): Content of message. - icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is - Information icon, may differ by Qt version. - msecs (int): Duration of message visibility in miliseconds. - Default is 10000 msecs, may differ by Qt version. - """ - if self._tray_manager: - self._tray_manager.show_tray_message(title, message, icon, msecs) - - def add_doubleclick_callback(self, callback): - if hasattr(self.manager, "add_doubleclick_callback"): - self.manager.add_doubleclick_callback(self, callback) - - -class ITrayAction(ITrayModule): - """Implementation of Tray action. - - Add action to tray menu which will trigger `on_action_trigger`. - It is expected to be used for showing tools. 
- - Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden - as it's not expected that action will use them. But it is possible if - necessary. - """ - - admin_action = False - _admin_submenu = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - @abstractmethod - def on_action_trigger(self): - """What happens on actions click.""" - pass - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - - if self.admin_action: - menu = self.admin_submenu(tray_menu) - action = QtWidgets.QAction(self.label, menu) - menu.addAction(action) - if not menu.menuAction().isVisible(): - menu.menuAction().setVisible(True) - - else: - action = QtWidgets.QAction(self.label, tray_menu) - tray_menu.addAction(action) - - action.triggered.connect(self.on_action_trigger) - - def tray_start(self): - return - - def tray_exit(self): - return - - @staticmethod - def admin_submenu(tray_menu): - if ITrayAction._admin_submenu is None: - from Qt import QtWidgets - - admin_submenu = QtWidgets.QMenu("Admin", tray_menu) - admin_submenu.menuAction().setVisible(False) - ITrayAction._admin_submenu = admin_submenu - return ITrayAction._admin_submenu - - -class ITrayService(ITrayModule): - # Module's property - menu_action = None - - # Class properties - _services_submenu = None - _icon_failed = None - _icon_running = None - _icon_idle = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - # TODO be able to get any sort of information to show/print - # @abstractmethod - # def get_service_info(self): - # pass - - @staticmethod - def services_submenu(tray_menu): - if ITrayService._services_submenu is None: - from Qt import QtWidgets - - services_submenu = QtWidgets.QMenu("Services", tray_menu) - services_submenu.menuAction().setVisible(False) - ITrayService._services_submenu = services_submenu - return ITrayService._services_submenu - - @staticmethod - def 
add_service_action(action): - ITrayService._services_submenu.addAction(action) - if not ITrayService._services_submenu.menuAction().isVisible(): - ITrayService._services_submenu.menuAction().setVisible(True) - - @staticmethod - def _load_service_icons(): - from Qt import QtGui - ITrayService._failed_icon = QtGui.QIcon( - resources.get_resource("icons", "circle_red.png") - ) - ITrayService._icon_running = QtGui.QIcon( - resources.get_resource("icons", "circle_green.png") - ) - ITrayService._icon_idle = QtGui.QIcon( - resources.get_resource("icons", "circle_orange.png") - ) - - @staticmethod - def get_icon_running(): - if ITrayService._icon_running is None: - ITrayService._load_service_icons() - return ITrayService._icon_running - - @staticmethod - def get_icon_idle(): - if ITrayService._icon_idle is None: - ITrayService._load_service_icons() - return ITrayService._icon_idle - - @staticmethod - def get_icon_failed(): - if ITrayService._failed_icon is None: - ITrayService._load_service_icons() - return ITrayService._failed_icon - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - action = QtWidgets.QAction( - self.label, - self.services_submenu(tray_menu) - ) - self.menu_action = action - - self.add_service_action(action) - - self.set_service_running_icon() - - def set_service_running_icon(self): - """Change icon of an QAction to green circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_running()) - - def set_service_failed_icon(self): - """Change icon of an QAction to red circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_failed()) - - def set_service_idle_icon(self): - """Change icon of an QAction to orange circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_idle()) - - class ModulesManager: """Manager of Pype modules helps to load and prepare them to work. 
diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py new file mode 100644 index 00000000000..6d51f1b828e --- /dev/null +++ b/openpype/modules/interfaces.py @@ -0,0 +1,267 @@ +from abc import abstractmethod + +from openpype import resources + +from .base import ( + OpenPypeInterface +) + + +class IPluginPaths(OpenPypeInterface): + """Module has plugin paths to return. + + Expected result is dictionary with keys "publish", "create", "load" or + "actions" and values as list or string. + { + "publish": ["path/to/publish_plugins"] + } + """ + # TODO validation of an output + @abstractmethod + def get_plugin_paths(self): + pass + + +class ILaunchHookPaths(OpenPypeInterface): + """Module has launch hook paths to return. + + Expected result is list of paths. + ["path/to/launch_hooks_dir"] + """ + + @abstractmethod + def get_launch_hook_paths(self): + pass + + +class ITrayModule(OpenPypeInterface): + """Module has special procedures when used in Pype Tray. + + IMPORTANT: + The module still must be usable if is not used in tray even if + would do nothing. + """ + tray_initialized = False + _tray_manager = None + + @abstractmethod + def tray_init(self): + """Initialization part of tray implementation. + + Triggered between `initialization` and `connect_with_modules`. + + This is where GUIs should be loaded or tray specific parts should be + prepared. + """ + pass + + @abstractmethod + def tray_menu(self, tray_menu): + """Add module's action to tray menu.""" + pass + + @abstractmethod + def tray_start(self): + """Start procedure in Pype tray.""" + pass + + @abstractmethod + def tray_exit(self): + """Cleanup method which is executed on tray shutdown. + + This is place where all threads should be shut. 
+ """ + pass + + def execute_in_main_thread(self, callback): + """ Pushes callback to the queue or process 'callback' on a main thread + + Some callbacks need to be processed on main thread (menu actions + must be added on main thread or they won't get triggered etc.) + """ + if not self.tray_initialized: + # TODO Called without initialized tray, still main thread needed + try: + callback() + + except Exception: + self.log.warning( + "Failed to execute {} in main thread".format(callback), + exc_info=True) + + return + self.manager.tray_manager.execute_in_main_thread(callback) + + def show_tray_message(self, title, message, icon=None, msecs=None): + """Show tray message. + + Args: + title (str): Title of message. + message (str): Content of message. + icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is + Information icon, may differ by Qt version. + msecs (int): Duration of message visibility in miliseconds. + Default is 10000 msecs, may differ by Qt version. + """ + if self._tray_manager: + self._tray_manager.show_tray_message(title, message, icon, msecs) + + def add_doubleclick_callback(self, callback): + if hasattr(self.manager, "add_doubleclick_callback"): + self.manager.add_doubleclick_callback(self, callback) + + +class ITrayAction(ITrayModule): + """Implementation of Tray action. + + Add action to tray menu which will trigger `on_action_trigger`. + It is expected to be used for showing tools. + + Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden + as it's not expected that action will use them. But it is possible if + necessary. 
+ """ + + admin_action = False + _admin_submenu = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + @abstractmethod + def on_action_trigger(self): + """What happens on actions click.""" + pass + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + if self.admin_action: + menu = self.admin_submenu(tray_menu) + action = QtWidgets.QAction(self.label, menu) + menu.addAction(action) + if not menu.menuAction().isVisible(): + menu.menuAction().setVisible(True) + + else: + action = QtWidgets.QAction(self.label, tray_menu) + tray_menu.addAction(action) + + action.triggered.connect(self.on_action_trigger) + + def tray_start(self): + return + + def tray_exit(self): + return + + @staticmethod + def admin_submenu(tray_menu): + if ITrayAction._admin_submenu is None: + from Qt import QtWidgets + + admin_submenu = QtWidgets.QMenu("Admin", tray_menu) + admin_submenu.menuAction().setVisible(False) + ITrayAction._admin_submenu = admin_submenu + return ITrayAction._admin_submenu + + +class ITrayService(ITrayModule): + # Module's property + menu_action = None + + # Class properties + _services_submenu = None + _icon_failed = None + _icon_running = None + _icon_idle = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + # TODO be able to get any sort of information to show/print + # @abstractmethod + # def get_service_info(self): + # pass + + @staticmethod + def services_submenu(tray_menu): + if ITrayService._services_submenu is None: + from Qt import QtWidgets + + services_submenu = QtWidgets.QMenu("Services", tray_menu) + services_submenu.menuAction().setVisible(False) + ITrayService._services_submenu = services_submenu + return ITrayService._services_submenu + + @staticmethod + def add_service_action(action): + ITrayService._services_submenu.addAction(action) + if not ITrayService._services_submenu.menuAction().isVisible(): + 
ITrayService._services_submenu.menuAction().setVisible(True) + + @staticmethod + def _load_service_icons(): + from Qt import QtGui + + ITrayService._failed_icon = QtGui.QIcon( + resources.get_resource("icons", "circle_red.png") + ) + ITrayService._icon_running = QtGui.QIcon( + resources.get_resource("icons", "circle_green.png") + ) + ITrayService._icon_idle = QtGui.QIcon( + resources.get_resource("icons", "circle_orange.png") + ) + + @staticmethod + def get_icon_running(): + if ITrayService._icon_running is None: + ITrayService._load_service_icons() + return ITrayService._icon_running + + @staticmethod + def get_icon_idle(): + if ITrayService._icon_idle is None: + ITrayService._load_service_icons() + return ITrayService._icon_idle + + @staticmethod + def get_icon_failed(): + if ITrayService._failed_icon is None: + ITrayService._load_service_icons() + return ITrayService._failed_icon + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + action = QtWidgets.QAction( + self.label, + self.services_submenu(tray_menu) + ) + self.menu_action = action + + self.add_service_action(action) + + self.set_service_running_icon() + + def set_service_running_icon(self): + """Change icon of an QAction to green circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_running()) + + def set_service_failed_icon(self): + """Change icon of an QAction to red circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_failed()) + + def set_service_idle_icon(self): + """Change icon of an QAction to orange circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_idle()) From abdaf019ba230709099b05406043c57572e2d10e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:01:08 +0200 Subject: [PATCH 07/77] interfaces inherit from OpenPypeInterface --- openpype/modules/__init__.py | 13 ++++++++----- openpype/modules/ftrack/ftrack_module.py | 4 ++-- openpype/modules/idle_manager/idle_module.py | 9 ++++++--- 
openpype/modules/settings_action.py | 9 ++++++--- openpype/modules/timers_manager/timers_manager.py | 11 ++++++++--- openpype/modules/webserver/webserver_module.py | 9 ++++++--- 6 files changed, 36 insertions(+), 19 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 3ac11950efe..724f442b74d 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,13 +2,15 @@ from .base import ( PypeModule, OpenPypeInterface, + ModulesManager, + TrayModulesManager +) +from .interfaces import ( ITrayModule, ITrayAction, ITrayService, IPluginPaths, - ILaunchHookPaths, - ModulesManager, - TrayModulesManager + ILaunchHookPaths ) from .settings_action import ( SettingsAction, @@ -47,13 +49,14 @@ "PypeModule", "OpenPypeInterface", + "ModulesManager", + "TrayModulesManager", + "ITrayModule", "ITrayAction", "ITrayService", "IPluginPaths", "ILaunchHookPaths", - "ModulesManager", - "TrayModulesManager", "SettingsAction", "LocalSettingsAction", diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index ee139a500e1..70f34b6389d 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,6 +6,7 @@ import openpype from openpype.modules import ( PypeModule, + OpenPypeInterface, ITrayModule, IPluginPaths, ITimersManager, @@ -17,8 +18,7 @@ FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -@six.add_metaclass(ABCMeta) -class IFtrackEventHandlerPaths: +class IFtrackEventHandlerPaths(OpenPypeInterface): """Other modules interface to return paths to ftrack event handlers. Expected output is dictionary with "server" and "user" keys. 
diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index 5dd5160aa7c..57ccc9cce78 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -4,11 +4,14 @@ import six -from openpype.modules import PypeModule, ITrayService +from openpype.modules import ( + PypeModule, + OpenPypeInterface, + ITrayService +) -@six.add_metaclass(ABCMeta) -class IIdleManager: +class IIdleManager(OpenPypeInterface): """Other modules interface to return callbacks by idle time in seconds. Expected output is dictionary with seconds as keys and callback/s diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 9db4a252bc6..f6d6463b25c 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -2,11 +2,14 @@ import six -from . import PypeModule, ITrayAction +from . import ( + PypeModule, + OpenPypeInterface, + ITrayAction +) -@six.add_metaclass(ABCMeta) -class ISettingsChangeListener: +class ISettingsChangeListener(OpenPypeInterface): """Module has plugin paths to return. Expected result is dictionary with keys "publish", "create", "load" or diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 92edd5aeaad..9566f9a6eff 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -2,12 +2,17 @@ import collections from abc import ABCMeta, abstractmethod import six -from .. import PypeModule, ITrayService, IIdleManager, IWebServerRoutes +from .. 
import ( + PypeModule, + OpenPypeInterface, + ITrayService, + IIdleManager, + IWebServerRoutes +) from avalon.api import AvalonMongoDB -@six.add_metaclass(ABCMeta) -class ITimersManager: +class ITimersManager(OpenPypeInterface): timer_manager_module = None @abstractmethod diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index b61619acded..edb0b0be3f1 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -5,11 +5,14 @@ import six from openpype import resources -from .. import PypeModule, ITrayService +from .. import ( + PypeModule, + OpenPypeInterface, + ITrayService +) -@six.add_metaclass(ABCMeta) -class IWebServerRoutes: +class IWebServerRoutes(OpenPypeInterface): """Other modules interface to register their routes.""" @abstractmethod def webserver_initialization(self, server_manager): From a0b24b9325e6c6a4240b55f7b1bbec1e3e2b8b2a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:01:25 +0200 Subject: [PATCH 08/77] remove deprecated sync server initialization --- openpype/modules/sync_server/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py index a814f0db622..d6a038372b7 100644 --- a/openpype/modules/sync_server/__init__.py +++ b/openpype/modules/sync_server/__init__.py @@ -1,5 +1,6 @@ from openpype.modules.sync_server.sync_server_module import SyncServerModule -def tray_init(tray_widget, main_widget): - return SyncServerModule() +__all__ = ( + "SyncServerModule", +) From b83e932a6ba76a4db80377e16f22d5219935fb70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:58:45 +0200 Subject: [PATCH 09/77] interfaces are defined in one specific file --- openpype/modules/ftrack/__init__.py | 2 - openpype/modules/ftrack/ftrack_module.py | 12 ------ openpype/modules/ftrack/interfaces.py | 12 ++++++ 
openpype/modules/idle_manager/__init__.py | 4 +- openpype/modules/idle_manager/idle_module.py | 27 ------------- openpype/modules/idle_manager/interfaces.py | 26 +++++++++++++ openpype/modules/interfaces.py | 4 +- openpype/modules/settings_module/__init__.py | 9 +++++ .../modules/settings_module/interfaces.py | 30 ++++++++++++++ .../{ => settings_module}/settings_action.py | 39 +------------------ openpype/modules/timers_manager/__init__.py | 4 +- openpype/modules/timers_manager/interfaces.py | 26 +++++++++++++ .../modules/timers_manager/timers_manager.py | 24 ------------ openpype/modules/webserver/__init__.py | 4 +- openpype/modules/webserver/interfaces.py | 9 +++++ .../modules/webserver/webserver_module.py | 7 ---- 16 files changed, 118 insertions(+), 121 deletions(-) create mode 100644 openpype/modules/ftrack/interfaces.py create mode 100644 openpype/modules/idle_manager/interfaces.py create mode 100644 openpype/modules/settings_module/__init__.py create mode 100644 openpype/modules/settings_module/interfaces.py rename openpype/modules/{ => settings_module}/settings_action.py (81%) create mode 100644 openpype/modules/timers_manager/interfaces.py create mode 100644 openpype/modules/webserver/interfaces.py diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index c1a557812cc..7261254c6fe 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,11 +1,9 @@ from .ftrack_module import ( FtrackModule, - IFtrackEventHandlerPaths, FTRACK_MODULE_DIR ) __all__ = ( "FtrackModule", - "IFtrackEventHandlerPaths", "FTRACK_MODULE_DIR" ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 70f34b6389d..36859780031 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -1,8 +1,6 @@ import os import json import collections -from abc import ABCMeta, abstractmethod -import six import openpype from 
openpype.modules import ( PypeModule, @@ -18,16 +16,6 @@ FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class IFtrackEventHandlerPaths(OpenPypeInterface): - """Other modules interface to return paths to ftrack event handlers. - - Expected output is dictionary with "server" and "user" keys. - """ - @abstractmethod - def get_event_handler_paths(self): - pass - - class FtrackModule( PypeModule, ITrayModule, diff --git a/openpype/modules/ftrack/interfaces.py b/openpype/modules/ftrack/interfaces.py new file mode 100644 index 00000000000..16ce0d2e621 --- /dev/null +++ b/openpype/modules/ftrack/interfaces.py @@ -0,0 +1,12 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IFtrackEventHandlerPaths(OpenPypeInterface): + """Other modules interface to return paths to ftrack event handlers. + + Expected output is dictionary with "server" and "user" keys. + """ + @abstractmethod + def get_event_handler_paths(self): + pass diff --git a/openpype/modules/idle_manager/__init__.py b/openpype/modules/idle_manager/__init__.py index 651f360c505..9d6e10bf399 100644 --- a/openpype/modules/idle_manager/__init__.py +++ b/openpype/modules/idle_manager/__init__.py @@ -1,10 +1,8 @@ from .idle_module import ( - IdleManager, - IIdleManager + IdleManager ) __all__ = ( "IdleManager", - "IIdleManager" ) diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index 57ccc9cce78..9e5211a0fa1 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -1,8 +1,5 @@ import platform import collections -from abc import ABCMeta, abstractmethod - -import six from openpype.modules import ( PypeModule, @@ -11,30 +8,6 @@ ) -class IIdleManager(OpenPypeInterface): - """Other modules interface to return callbacks by idle time in seconds. 
- - Expected output is dictionary with seconds as keys and callback/s - as value, value may be callback of list of callbacks. - EXAMPLE: - ``` - { - 60: self.on_minute_idle - } - ``` - """ - idle_manager = None - - @abstractmethod - def callbacks_by_idle_time(self): - pass - - @property - def idle_time(self): - if self.idle_manager: - return self.idle_manager.idle_time - - class IdleManager(PypeModule, ITrayService): """ Measure user's idle time in seconds. Idle time resets on keyboard/mouse input. diff --git a/openpype/modules/idle_manager/interfaces.py b/openpype/modules/idle_manager/interfaces.py new file mode 100644 index 00000000000..71cd17a64ac --- /dev/null +++ b/openpype/modules/idle_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IIdleManager(OpenPypeInterface): + """Other modules interface to return callbacks by idle time in seconds. + + Expected output is dictionary with seconds as keys and callback/s + as value, value may be callback of list of callbacks. 
+ EXAMPLE: + ``` + { + 60: self.on_minute_idle + } + ``` + """ + idle_manager = None + + @abstractmethod + def callbacks_by_idle_time(self): + pass + + @property + def idle_time(self): + if self.idle_manager: + return self.idle_manager.idle_time diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 6d51f1b828e..a60c5fa6063 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -2,9 +2,7 @@ from openpype import resources -from .base import ( - OpenPypeInterface -) +from openpype.modules import OpenPypeInterface class IPluginPaths(OpenPypeInterface): diff --git a/openpype/modules/settings_module/__init__.py b/openpype/modules/settings_module/__init__.py new file mode 100644 index 00000000000..95510eba9da --- /dev/null +++ b/openpype/modules/settings_module/__init__.py @@ -0,0 +1,9 @@ +from .settings_action import ( + LocalSettingsAction, + SettingsAction +) + +__all__ = ( + "LocalSettingsAction", + "SettingsAction" +) diff --git a/openpype/modules/settings_module/interfaces.py b/openpype/modules/settings_module/interfaces.py new file mode 100644 index 00000000000..42db395649b --- /dev/null +++ b/openpype/modules/settings_module/interfaces.py @@ -0,0 +1,30 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class ISettingsChangeListener(OpenPypeInterface): + """Module has plugin paths to return. + + Expected result is dictionary with keys "publish", "create", "load" or + "actions" and values as list or string. 
+ { + "publish": ["path/to/publish_plugins"] + } + """ + @abstractmethod + def on_system_settings_save( + self, old_value, new_value, changes, new_value_metadata + ): + pass + + @abstractmethod + def on_project_settings_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass + + @abstractmethod + def on_project_anatomy_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_module/settings_action.py similarity index 81% rename from openpype/modules/settings_action.py rename to openpype/modules/settings_module/settings_action.py index f6d6463b25c..a6909e1fdf2 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_module/settings_action.py @@ -1,40 +1,5 @@ -from abc import ABCMeta, abstractmethod - -import six - -from . import ( - PypeModule, - OpenPypeInterface, - ITrayAction -) - - -class ISettingsChangeListener(OpenPypeInterface): - """Module has plugin paths to return. - - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string. 
- { - "publish": ["path/to/publish_plugins"] - } - """ - @abstractmethod - def on_system_settings_save( - self, old_value, new_value, changes, new_value_metadata - ): - pass - - @abstractmethod - def on_project_settings_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass - - @abstractmethod - def on_project_anatomy_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class SettingsAction(PypeModule, ITrayAction): diff --git a/openpype/modules/timers_manager/__init__.py b/openpype/modules/timers_manager/__init__.py index 1b565cc59ae..5d7a4166d31 100644 --- a/openpype/modules/timers_manager/__init__.py +++ b/openpype/modules/timers_manager/__init__.py @@ -1,9 +1,7 @@ from .timers_manager import ( - ITimersManager, TimersManager ) __all__ = ( - "ITimersManager", - "TimersManager" + "TimersManager", ) diff --git a/openpype/modules/timers_manager/interfaces.py b/openpype/modules/timers_manager/interfaces.py new file mode 100644 index 00000000000..179013cffe6 --- /dev/null +++ b/openpype/modules/timers_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class ITimersManager(OpenPypeInterface): + timer_manager_module = None + + @abstractmethod + def stop_timer(self): + pass + + @abstractmethod + def start_timer(self, data): + pass + + def timer_started(self, data): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_started(self.id, data) + + def timer_stopped(self): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_stopped(self.id) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 9566f9a6eff..f893a0f3e74 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ 
b/openpype/modules/timers_manager/timers_manager.py @@ -12,30 +12,6 @@ from avalon.api import AvalonMongoDB -class ITimersManager(OpenPypeInterface): - timer_manager_module = None - - @abstractmethod - def stop_timer(self): - pass - - @abstractmethod - def start_timer(self, data): - pass - - def timer_started(self, data): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_started(self.id, data) - - def timer_stopped(self): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_stopped(self.id) - - class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): """ Handles about Timers. diff --git a/openpype/modules/webserver/__init__.py b/openpype/modules/webserver/__init__.py index defd115e575..899b97d6d4d 100644 --- a/openpype/modules/webserver/__init__.py +++ b/openpype/modules/webserver/__init__.py @@ -1,10 +1,8 @@ from .webserver_module import ( - WebServerModule, - IWebServerRoutes + WebServerModule ) __all__ = ( "WebServerModule", - "IWebServerRoutes" ) diff --git a/openpype/modules/webserver/interfaces.py b/openpype/modules/webserver/interfaces.py new file mode 100644 index 00000000000..779361a9ecd --- /dev/null +++ b/openpype/modules/webserver/interfaces.py @@ -0,0 +1,9 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IWebServerRoutes(OpenPypeInterface): + """Other modules interface to register their routes.""" + @abstractmethod + def webserver_initialization(self, server_manager): + pass diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index edb0b0be3f1..57e5df8e855 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -12,13 +12,6 @@ ) -class IWebServerRoutes(OpenPypeInterface): - """Other modules interface to register their routes.""" - @abstractmethod - def webserver_initialization(self, server_manager): - pass - - 
class WebServerModule(PypeModule, ITrayService): name = "webserver" label = "WebServer" From 45f894bf3c7e06baf86135a24b9fb7d50c416f5a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:59:56 +0200 Subject: [PATCH 10/77] load interfaces and modules more dynamically --- openpype/modules/base.py | 209 ++++++++++++++++++++++++++++++++------- 1 file changed, 171 insertions(+), 38 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 6e1d19589cd..e4e9013eeef 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- """Base class for Pype Modules.""" +import os +import sys +import types import time import inspect import logging @@ -14,6 +17,141 @@ from openpype import resources +class __ModuleClass: + def __init__(self): + self.object_setattr("__attributes__", {}) + self.object_setattr("__defaults__", set()) + + def __getattr__(self, attr_name): + return self.__attributes__.get( + attr_name, + type("Missing.{}".format(attr_name), (), {}) + ) + + def __iter__(self): + for module in self.values(): + yield module + + def object_setattr(self, attr_name, value): + object.__setattr__(self, attr_name, value) + + def __setattr__(self, attr_name, value): + self.__attributes__[attr_name] = value + + def keys(self): + return self.__attributes__.keys() + + def values(self): + return self.__attributes__.values() + + def items(self): + return self.__attributes__.items() + + +def load_interfaces(force=False): + if not force and "openpype_interfaces" in sys.modules: + return + + sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() + + log = PypeLogger.get_logger("InterfacesLoader") + + current_dir = os.path.abspath(os.path.dirname(__file__)) + + interface_paths = [ + os.path.join(current_dir, "interfaces.py") + ] + + for filename in os.listdir(current_dir): + full_path = os.path.join(current_dir, filename) + if os.path.isdir(full_path): + interface_paths.append( + 
os.path.join(full_path, "interfaces.py") + ) + + # print(interface_paths) + for full_path in interface_paths: + if not os.path.exists(full_path): + continue + + filename = os.path.splitext(os.path.basename(full_path))[0] + + try: + # Prepare module object where content of file will be parsed + module = types.ModuleType(filename) + + if six.PY3: + import importlib + + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + filename, full_path + ) + module_loader.exec_module(module) + else: + # Execute module code and store content to module + with open(full_path) as _stream: + # Execute content and store it to module object + exec(_stream.read(), module.__dict__) + + module.__file__ = full_path + + except Exception: + log.warning( + "Failed to load path: \"{0}\"".format(full_path), + exc_info=True + ) + continue + + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + not inspect.isclass(attr) + or attr is OpenPypeInterface + or not issubclass(attr, OpenPypeInterface) + ): + continue + setattr(openpype_interfaces, attr_name, attr) + + +def load_modules(force=False): + if not force and "openpype_modules" in sys.modules: + return + + from openpype.lib import modules_from_path + + sys.modules["openpype_modules"] = openpype_modules = __ModuleClass() + + log = PypeLogger.get_logger("ModulesLoader") + + from . 
import ( + avalon_apps, + clockify, + deadline, + ftrack, + idle_manager, + log_viewer, + muster, + settings_module, + slack, + sync_server, + timers_manager, + webserver + ) + setattr(openpype_modules, "avalon_apps", avalon_apps) + setattr(openpype_modules, "clockify", clockify) + setattr(openpype_modules, "deadline", deadline) + setattr(openpype_modules, "ftrack", ftrack) + setattr(openpype_modules, "idle_manager", idle_manager) + setattr(openpype_modules, "log_viewer", log_viewer) + setattr(openpype_modules, "muster", muster) + setattr(openpype_modules, "settings_module", settings_module) + setattr(openpype_modules, "sync_server", sync_server) + setattr(openpype_modules, "slack", slack) + setattr(openpype_modules, "timers_manager", timers_manager) + setattr(openpype_modules, "webserver", webserver) + + @six.add_metaclass(ABCMeta) class OpenPypeInterface: """Base class of Interface that can be used as Mixin with abstract parts. @@ -105,44 +243,12 @@ def __init__(self, _system_settings=None): # For report of time consumption self._report = {} - self._raw_modules = None - self.initialize_modules() self.connect_modules() def collect_modules(self): - if self._raw_modules is not None: - return - - self._raw_modules = [] - - # Go through globals in `pype.modules` - for name in dir(openpype.modules): - modules_item = getattr(openpype.modules, name, None) - # Filter globals that are not classes which inherit from PypeModule - if ( - not inspect.isclass(modules_item) - or modules_item is openpype.modules.PypeModule - or not issubclass(modules_item, openpype.modules.PypeModule) - ): - continue - - # Check if class is abstract (Developing purpose) - if inspect.isabstract(modules_item): - # Find missing implementations by convetion on `abc` module - not_implemented = [] - for attr_name in dir(modules_item): - attr = getattr(modules_item, attr_name, None) - if attr and getattr(attr, "__isabstractmethod__", None): - not_implemented.append(attr_name) - - # Log missing 
implementations - self.log.warning(( - "Skipping abstract Class: {}. Missing implementations: {}" - ).format(name, ", ".join(not_implemented))) - continue - - self._raw_modules.append(modules_item) + load_interfaces() + load_modules() def initialize_modules(self): """Import and initialize modules.""" @@ -159,8 +265,37 @@ def initialize_modules(self): time_start = time.time() prev_start_time = time_start - # Go through globals in `pype.modules` - for modules_item in self._raw_modules: + module_classes = [] + for module in openpype_modules: + # Go through globals in `pype.modules` + for name in dir(module): + modules_item = getattr(module, name, None) + # Filter globals that are not classes which inherit from + # PypeModule + if ( + not inspect.isclass(modules_item) + or modules_item is PypeModule + or not issubclass(modules_item, PypeModule) + ): + continue + + # Check if class is abstract (Developing purpose) + if inspect.isabstract(modules_item): + # Find missing implementations by convetion on `abc` module + not_implemented = [] + for attr_name in dir(modules_item): + attr = getattr(modules_item, attr_name, None) + if attr and getattr(attr, "__isabstractmethod__", None): + not_implemented.append(attr_name) + + # Log missing implementations + self.log.warning(( + "Skipping abstract Class: {}. 
Missing implementations: {}" + ).format(name, ", ".join(not_implemented))) + continue + module_classes.append(modules_item) + + for modules_item in module_classes: try: name = modules_item.__name__ # Try initialize module @@ -480,8 +615,6 @@ def __init__(self): self.modules_by_name = {} self._report = {} - self._raw_modules = None - self.tray_manager = None self.doubleclick_callbacks = {} From c7e126bc6ebabc53f541706dd850cf2ffe8e941f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:00:31 +0200 Subject: [PATCH 11/77] use dynamic imports in modules manager --- openpype/modules/base.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e4e9013eeef..c84a8a95a48 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -254,6 +254,8 @@ def initialize_modules(self): """Import and initialize modules.""" self.collect_modules() + import openpype_modules + self.log.debug("*** Pype modules initialization.") # Prepare settings for modules system_settings = getattr(self, "_system_settings", None) @@ -395,6 +397,8 @@ def collect_plugin_paths(self): and "actions" each containing list of paths. """ # Output structure + from openpype_interfaces import IPluginPaths + output = { "publish": [], "create": [], @@ -447,6 +451,8 @@ def collect_launch_hook_paths(self): Returns: list: Paths to launch hook directories. 
""" + from openpype_interfaces import ILaunchHookPaths + str_type = type("") expected_types = (list, tuple, set) @@ -647,6 +653,8 @@ def initialize(self, tray_manager, tray_menu): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): + from openpype_interfaces import ITrayModule + output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -722,6 +730,8 @@ def tray_menu(self, tray_menu): self._report["Tray menu"] = report def start_modules(self): + from openpype_interfaces import ITrayService + report = {} time_start = time.time() prev_start_time = time_start From 8f35cb61f5a996331b76e7e676faa46a0ec2208c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:00:53 +0200 Subject: [PATCH 12/77] removed all modules and iterfaces from public api --- openpype/modules/__init__.py | 76 +----------------------------------- 1 file changed, 1 insertion(+), 75 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 724f442b74d..3ad9a751611 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -5,44 +5,6 @@ ModulesManager, TrayModulesManager ) -from .interfaces import ( - ITrayModule, - ITrayAction, - ITrayService, - IPluginPaths, - ILaunchHookPaths -) -from .settings_action import ( - SettingsAction, - ISettingsChangeListener, - LocalSettingsAction -) -from .webserver import ( - WebServerModule, - IWebServerRoutes -) -from .idle_manager import ( - IdleManager, - IIdleManager -) -from .timers_manager import ( - TimersManager, - ITimersManager -) -from .avalon_apps import AvalonModule -from .launcher_action import LauncherAction -from .ftrack import ( - FtrackModule, - IFtrackEventHandlerPaths -) -from .clockify import ClockifyModule -from .log_viewer import LogViewModule -from .muster import MusterModule -from .deadline import DeadlineModule -from .project_manager_action import ProjectManagerAction -from .standalonepublish_action import 
StandAlonePublishAction -from .sync_server import SyncServerModule -from .slack import SlackIntegrationModule __all__ = ( @@ -50,41 +12,5 @@ "OpenPypeInterface", "ModulesManager", - "TrayModulesManager", - - "ITrayModule", - "ITrayAction", - "ITrayService", - "IPluginPaths", - "ILaunchHookPaths", - - "SettingsAction", - "LocalSettingsAction", - - "WebServerModule", - "IWebServerRoutes", - - "IdleManager", - "IIdleManager", - - "TimersManager", - "ITimersManager", - - "AvalonModule", - "LauncherAction", - - "FtrackModule", - "IFtrackEventHandlerPaths", - - "ClockifyModule", - "IdleManager", - "LogViewModule", - "MusterModule", - "DeadlineModule", - "ProjectManagerAction", - "StandAlonePublishAction", - - "SyncServerModule", - - "SlackIntegrationModule" + "TrayModulesManager" ) From 6813ff03664a7f6c11e995bc24669a4a6254e71d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:01:54 +0200 Subject: [PATCH 13/77] changed imports where from interfaces are loaded --- openpype/modules/avalon_apps/avalon_app.py | 4 ++-- openpype/modules/clockify/clockify_module.py | 4 ++-- openpype/modules/deadline/deadline_module.py | 4 ++-- openpype/modules/ftrack/ftrack_module.py | 9 +++++---- openpype/modules/idle_manager/idle_module.py | 8 ++++---- openpype/modules/log_viewer/log_view_module.py | 3 ++- openpype/modules/muster/muster.py | 4 ++-- openpype/modules/project_manager_action.py | 3 ++- openpype/modules/slack/slack_module.py | 7 +++++-- openpype/modules/standalonepublish_action.py | 3 ++- openpype/modules/sync_server/sync_server_module.py | 3 ++- openpype/modules/timers_manager/timers_manager.py | 8 +++----- openpype/modules/webserver/host_console_listener.py | 2 +- openpype/modules/webserver/webserver_module.py | 11 ++++------- 14 files changed, 38 insertions(+), 35 deletions(-) diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/avalon_apps/avalon_app.py index 4e95f6e72b3..7f130bfab1d 100644 --- a/openpype/modules/avalon_apps/avalon_app.py 
+++ b/openpype/modules/avalon_apps/avalon_app.py @@ -1,8 +1,8 @@ import os import openpype from openpype import resources -from .. import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/clockify/clockify_module.py index e3751c46b81..83f8d07c3aa 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/clockify/clockify_module.py @@ -7,8 +7,8 @@ CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index 2a2fba41d6a..47fd4e9656e 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -1,6 +1,6 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths) +from openpype.modules import PypeModule +from openpype_interfaces import IPluginPaths class DeadlineModule(PypeModule, IPluginPaths): diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 36859780031..6fce308b19b 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -2,14 +2,15 @@ import json import collections import openpype -from openpype.modules import ( - PypeModule, - OpenPypeInterface, +from openpype.modules import PypeModule + +from openpype_interfaces import ( ITrayModule, IPluginPaths, ITimersManager, ILaunchHookPaths, - ISettingsChangeListener + ISettingsChangeListener, + IFtrackEventHandlerPaths ) from openpype.settings import SaveWarningExc diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index 9e5211a0fa1..d669fcb90e1 100644 --- 
a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -1,10 +1,10 @@ import platform import collections -from openpype.modules import ( - PypeModule, - OpenPypeInterface, - ITrayService +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITrayService, + IIdleManager ) diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index dde482b04ce..22826d8a540 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,5 +1,6 @@ from openpype.api import Logger -from .. import PypeModule, ITrayModule +from openpype.modules import PypeModule +from openpype_interfaces import ITrayModule class LogViewModule(PypeModule, ITrayModule): diff --git a/openpype/modules/muster/muster.py b/openpype/modules/muster/muster.py index 1a829268028..164f20054a5 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/muster/muster.py @@ -2,8 +2,8 @@ import json import appdirs import requests -from .. import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/project_manager_action.py index 1387aa258c2..9a36d973b37 100644 --- a/openpype/modules/project_manager_action.py +++ b/openpype/modules/project_manager_action.py @@ -1,4 +1,5 @@ -from . 
import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class ProjectManagerAction(PypeModule, ITrayAction): diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 9dd5a3d02b7..8e6ac100370 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,6 +1,9 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths, ILaunchHookPaths) +from openpype.modules import PypeModule +from openpype_interfaces import ( + IPluginPaths, + ILaunchHookPaths +) SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/standalonepublish_action.py index 4f87f9704c6..53319f9e113 100644 --- a/openpype/modules/standalonepublish_action.py +++ b/openpype/modules/standalonepublish_action.py @@ -2,7 +2,8 @@ import platform import subprocess from openpype.lib import get_pype_execute_args -from . import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class StandAlonePublishAction(PypeModule, ITrayAction): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 15de4b12e9b..63f39474b1e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -7,7 +7,8 @@ from avalon.api import AvalonMongoDB -from .. 
import PypeModule, ITrayModule +from openpype.modules import PypeModule +from openpype_interfaces import ITrayModule from openpype.api import ( Anatomy, get_project_settings, diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index f893a0f3e74..b31e14209a0 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -1,10 +1,8 @@ import os import collections -from abc import ABCMeta, abstractmethod -import six -from .. import ( - PypeModule, - OpenPypeInterface, +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITimersManager, ITrayService, IIdleManager, IWebServerRoutes diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/webserver/host_console_listener.py index 01a8af643e6..bcf4cadf6a8 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/webserver/host_console_listener.py @@ -5,7 +5,7 @@ from concurrent.futures import CancelledError from Qt import QtWidgets -from openpype.modules import ITrayService +from openpype_interfaces import ITrayService log = logging.getLogger(__name__) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 57e5df8e855..192baad013a 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -1,14 +1,11 @@ import os import socket -from abc import ABCMeta, abstractmethod - -import six from openpype import resources -from .. 
import ( - PypeModule, - OpenPypeInterface, - ITrayService +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITrayService, + IWebServerRoutes ) From 82a607f7d919fc5528fe7a5c08c8fd368e486be0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:05:55 +0200 Subject: [PATCH 14/77] add missing modules --- openpype/modules/base.py | 9 +++++++-- openpype/settings/lib.py | 3 ++- openpype/tools/tray/pype_tray.py | 10 +++++----- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c84a8a95a48..e8e38602978 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -14,7 +14,6 @@ import openpype from openpype.settings import get_system_settings from openpype.lib import PypeLogger -from openpype import resources class __ModuleClass: @@ -136,7 +135,9 @@ def load_modules(force=False): slack, sync_server, timers_manager, - webserver + webserver, + standalonepublish_action, + project_manager_action ) setattr(openpype_modules, "avalon_apps", avalon_apps) setattr(openpype_modules, "clockify", clockify) @@ -150,6 +151,10 @@ def load_modules(force=False): setattr(openpype_modules, "slack", slack) setattr(openpype_modules, "timers_manager", timers_manager) setattr(openpype_modules, "webserver", webserver) + setattr( + openpype_modules, "standalonepublish_action", standalonepublish_action + ) + setattr(openpype_modules, "project_manager_action", project_manager_action) @six.add_metaclass(ABCMeta) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5c2c0dcd946..ec9846eef7e 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -114,7 +114,8 @@ def save_studio_settings(data): SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener old_data = get_system_settings() default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 794312f3894..ed66f1a80f8 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -15,11 +15,7 @@ get_system_settings ) from openpype.lib import get_pype_execute_args -from openpype.modules import ( - TrayModulesManager, - ITrayAction, - ITrayService -) +from openpype.modules import TrayModulesManager from openpype import style from .pype_info_widget import PypeInfoWidget @@ -80,6 +76,10 @@ def _main_thread_execution(self): def initialize_modules(self): """Add modules to tray.""" + from openpype_interfaces import ( + ITrayAction, + ITrayService + ) self.modules_manager.initialize(self, self.tray_widget.menu) From 65dedb05345034038131e774ff646a15e0b3cc86 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:06:15 +0200 Subject: [PATCH 15/77] use relative imports --- openpype/modules/sync_server/__init__.py | 2 +- openpype/modules/sync_server/tray/app.py | 2 +- openpype/modules/sync_server/tray/delegates.py | 2 +- openpype/modules/sync_server/tray/models.py | 2 +- openpype/modules/sync_server/tray/widgets.py | 8 ++++---- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py index d6a038372b7..430ab53c91b 100644 --- a/openpype/modules/sync_server/__init__.py +++ b/openpype/modules/sync_server/__init__.py @@ -1,4 +1,4 @@ -from openpype.modules.sync_server.sync_server_module import SyncServerModule +from .sync_server_module import SyncServerModule __all__ = ( diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 
dd2b4be7492..106076d81c2 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -5,7 +5,7 @@ from openpype.lib import PypeLogger from openpype import resources -from openpype.modules.sync_server.tray.widgets import ( +from .widgets import ( SyncProjectListWidget, SyncRepresentationSummaryWidget ) diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 9316ec2c3ee..461b9fffb36 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -2,7 +2,7 @@ from Qt import QtCore, QtWidgets, QtGui from openpype.lib import PypeLogger -from openpype.modules.sync_server.tray import lib +from . import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index efef039b8b5..8c86d3b98f6 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -11,7 +11,7 @@ from openpype.lib import PypeLogger from openpype.api import get_local_site_id -from openpype.modules.sync_server.tray import lib +from . import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index d38416fbcec..c9160733a01 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -17,13 +17,13 @@ from avalon.tools.delegates import pretty_timestamp from avalon.vendor import qtawesome -from openpype.modules.sync_server.tray.models import ( +from .models import ( SyncRepresentationSummaryModel, SyncRepresentationDetailModel ) -from openpype.modules.sync_server.tray import lib -from openpype.modules.sync_server.tray import delegates +from . import lib +from . 
import delegates log = PypeLogger().get_logger("SyncServer") @@ -187,7 +187,7 @@ def _double_clicked(self, index): detail_window = SyncServerDetailWindow( self.sync_server, _id, self.model.project, parent=self) detail_window.exec() - + def _on_context_menu(self, point): """ Shows menu with loader actions on Right-click. From 4ca5ef46b54f7fc445cec3ecc78e4bfd7541bfd7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:10:41 +0200 Subject: [PATCH 16/77] adde new lib import functions --- openpype/lib/__init__.py | 6 +- openpype/lib/python_module_tools.py | 96 ++++++++++++++++++++++++----- 2 files changed, 84 insertions(+), 18 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 12c04a4236c..52a6024feb7 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -52,9 +52,11 @@ ) from .python_module_tools import ( + import_filepath, modules_from_path, recursive_bases_from_class, - classes_from_module + classes_from_module, + load_module_from_dirpath ) from .avalon_context import ( @@ -170,9 +172,11 @@ "get_ffmpeg_tool_path", "ffprobe_streams", + "import_filepath", "modules_from_path", "recursive_bases_from_class", "classes_from_module", + "load_module_from_dirpath", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 44a10078891..102ae7e71a9 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -9,6 +9,29 @@ PY3 = sys.version_info[0] == 3 +def import_filepath(filepath, module_name=None): + if module_name is None: + module_name = os.path.splitext(os.path.basename(filepath))[0] + + # Prepare module object where content of file will be parsed + module = types.ModuleType(module_name) + + if PY3: + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, filepath + ) + module_loader.exec_module(module) + else: + # Execute module code 
and store content to module + with open(filepath) as _stream: + # Execute content and store it to module object + exec(_stream.read(), module.__dict__) + + module.__file__ = filepath + return module + + def modules_from_path(folder_path): """Get python scripts as modules from a path. @@ -55,23 +78,7 @@ def modules_from_path(folder_path): continue try: - # Prepare module object where content of file will be parsed - module = types.ModuleType(mod_name) - - if PY3: - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - mod_name, full_path - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(full_path) as _stream: - # Execute content and store it to module object - exec(_stream.read(), module.__dict__) - - module.__file__ = full_path - + module = import_filepath(full_path, mod_name) modules.append((full_path, module)) except Exception: @@ -127,3 +134,58 @@ def classes_from_module(superclass, module): classes.append(obj) return classes + + +def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import imp + + dst_module = sys.modules[dst_module_name] + + fp, pathname, description = imp.find_module(module_name, [dirpath]) + module = imp.load_module(full_module_name, fp, pathname, description) + setattr(dst_module, module_name, module) + + return module + + +def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import importlib.util + from importlib._bootstrap_external import PathFinder + + dst_module = sys.modules[dst_module_name] + loader = PathFinder.find_module(full_module_name, [dirpath]) + + spec = importlib.util.spec_from_loader( + 
full_module_name, loader, origin=dirpath + ) + + module = importlib.util.module_from_spec(spec) + + if dst_module is not None: + setattr(dst_module, module_name, module) + + sys.modules[full_module_name] = module + + loader.exec_module(module) + + return module + + +def load_module_from_dirpath(dirpath, folder_name, dst_module_name): + if PY3: + module = _load_module_from_dirpath_py3( + dirpath, folder_name, dst_module_name + ) + else: + module = _load_module_from_dirpath_py2( + dirpath, folder_name, dst_module_name + ) + return module From cc457406a78aaceb5ccd0504b330211313a9879f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:11:43 +0200 Subject: [PATCH 17/77] use import_filepath from lib --- openpype/modules/base.py | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e8e38602978..8709bccf3eb 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -51,6 +51,8 @@ def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return + from openpype.lib import import_filepath + sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() log = PypeLogger.get_logger("InterfacesLoader") @@ -73,27 +75,9 @@ def load_interfaces(force=False): if not os.path.exists(full_path): continue - filename = os.path.splitext(os.path.basename(full_path))[0] - try: # Prepare module object where content of file will be parsed - module = types.ModuleType(filename) - - if six.PY3: - import importlib - - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - filename, full_path - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(full_path) as _stream: - # Execute content and store it to module object - exec(_stream.read(), module.__dict__) - - module.__file__ = full_path + module = import_filepath(full_path) except 
Exception: log.warning( From 8d5ef62c1c26fe3fd5e6542ea07b665975a5bce0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:13:51 +0200 Subject: [PATCH 18/77] minor changes --- openpype/modules/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 8709bccf3eb..3c2aca73d64 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -107,6 +107,7 @@ def load_modules(force=False): log = PypeLogger.get_logger("ModulesLoader") + # TODO import dynamically from defined paths from . import ( avalon_apps, clockify, @@ -175,7 +176,7 @@ def name(self): def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger().get_logger(self.name) + self.log = PypeLogger.get_logger(self.name) self.initialize(settings) From 0444e325501d5d7e0c901d80b9e0c8994c04c11e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:51:05 +0200 Subject: [PATCH 19/77] python 2 compatibility --- openpype/modules/base.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 3c2aca73d64..f1b0ef68081 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -16,10 +16,12 @@ from openpype.lib import PypeLogger -class __ModuleClass: +# Inherit from `object` for Python 2 hosts +class _ModuleClass(object): def __init__(self): - self.object_setattr("__attributes__", {}) - self.object_setattr("__defaults__", set()) + # Call setattr on super class + super(_ModuleClass, self).__setattr__("__attributes__", dict()) + super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): return self.__attributes__.get( @@ -31,9 +33,6 @@ def __iter__(self): for module in self.values(): yield module - def object_setattr(self, attr_name, value): - object.__setattr__(self, attr_name, value) - def __setattr__(self, attr_name, value): 
self.__attributes__[attr_name] = value @@ -53,7 +52,7 @@ def load_interfaces(force=False): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() + sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass() log = PypeLogger.get_logger("InterfacesLoader") @@ -103,7 +102,7 @@ def load_modules(force=False): from openpype.lib import modules_from_path - sys.modules["openpype_modules"] = openpype_modules = __ModuleClass() + sys.modules["openpype_modules"] = openpype_modules = _ModuleClass() log = PypeLogger.get_logger("ModulesLoader") From 9b84b6b72a3178599db07d930ec443db82d1f5f5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:51:12 +0200 Subject: [PATCH 20/77] added missing launcher module --- openpype/modules/base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index f1b0ef68081..a13363f18db 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -120,6 +120,7 @@ def load_modules(force=False): sync_server, timers_manager, webserver, + launcher_action, standalonepublish_action, project_manager_action ) @@ -135,6 +136,7 @@ def load_modules(force=False): setattr(openpype_modules, "slack", slack) setattr(openpype_modules, "timers_manager", timers_manager) setattr(openpype_modules, "webserver", webserver) + setattr(openpype_modules, "launcher_action", launcher_action) setattr( openpype_modules, "standalonepublish_action", standalonepublish_action ) From c2720b6728bb5503897cfbd7f6f746bf6b2d22d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:52:53 +0200 Subject: [PATCH 21/77] fix launcher module --- openpype/modules/launcher_action.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/launcher_action.py b/openpype/modules/launcher_action.py index 0059ff021b8..728143ffacd 100644 --- a/openpype/modules/launcher_action.py +++ 
b/openpype/modules/launcher_action.py @@ -1,4 +1,5 @@ -from . import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class LauncherAction(PypeModule, ITrayAction): From 0b0b74ca7cd0d552583696377324e95cc9de9dfd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:02:46 +0200 Subject: [PATCH 22/77] added name attribute to _ModuleClass --- openpype/modules/base.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index a13363f18db..9ae799d6d7c 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -18,8 +18,9 @@ # Inherit from `object` for Python 2 hosts class _ModuleClass(object): - def __init__(self): + def __init__(self, name): # Call setattr on super class + super(_ModuleClass, self).__setattr__("name", name) super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) @@ -52,7 +53,9 @@ def load_interfaces(force=False): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass() + sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass( + "openpype_interfaces" + ) log = PypeLogger.get_logger("InterfacesLoader") @@ -102,7 +105,9 @@ def load_modules(force=False): from openpype.lib import modules_from_path - sys.modules["openpype_modules"] = openpype_modules = _ModuleClass() + sys.modules["openpype_modules"] = openpype_modules = _ModuleClass( + "openpype_modules" + ) log = PypeLogger.get_logger("ModulesLoader") From 0ec3bb18d9878b9c67d5f29147f2b66596a22865 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:03:51 +0200 Subject: [PATCH 23/77] added _InterfacesClass for interfaces --- openpype/modules/base.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 
9ae799d6d7c..2086a5a2801 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -47,14 +47,18 @@ def items(self): return self.__attributes__.items() +class _InterfacesClass(_ModuleClass): + pass + + def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass( - "openpype_interfaces" + sys.modules["openpype_interfaces"] = openpype_interfaces = ( + _InterfacesClass("openpype_interfaces") ) log = PypeLogger.get_logger("InterfacesLoader") From a9616ce560e0d515a46825d3f055f19708b6de01 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:12 +0200 Subject: [PATCH 24/77] Interface return missing interface if is not found --- openpype/modules/base.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 2086a5a2801..d2a1b8ed933 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -25,10 +25,11 @@ def __init__(self, name): super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): - return self.__attributes__.get( - attr_name, - type("Missing.{}".format(attr_name), (), {}) - ) + if attr_name not in self.__attributes__: + raise ImportError("No module named {}.{}".format( + self.name, attr_name + )) + return self.__attributes__[attr_name] def __iter__(self): for module in self.values(): @@ -48,7 +49,16 @@ def items(self): class _InterfacesClass(_ModuleClass): - pass + def __getattr__(self, attr_name): + if attr_name not in self.__attributes__: + # Fake Interface if is not missing + self.__attributes__[attr_name] = type( + "{}".format(attr_name), + (MissingInteface, ), + {} + ) + + return self.__attributes__[attr_name] def load_interfaces(force=False): @@ -162,6 +172,10 @@ class OpenPypeInterface: pass +class MissingInteface(OpenPypeInterface): + 
pass + + @six.add_metaclass(ABCMeta) class PypeModule: """Base class of pype module. From bf4d85d5872afd9c3ebb9e8834bcc11877b07fa8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:24 +0200 Subject: [PATCH 25/77] fix remaining ISettingsChangeListener imports --- openpype/settings/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index ec9846eef7e..4a363910b8e 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -162,7 +162,8 @@ def save_project_settings(project_name, overrides): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_SETTINGS_KEY] if project_name: @@ -223,7 +224,8 @@ def save_project_anatomy(project_name, anatomy_data): SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_ANATOMY_KEY] if project_name: From d1dfa251d9fe02d12ffc74041b8a9aaf6439638c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:52 +0200 Subject: [PATCH 26/77] simplified fake interface --- openpype/modules/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index d2a1b8ed933..877c363f61e 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -53,7 +53,7 @@ def __getattr__(self, attr_name): if attr_name not in self.__attributes__: # Fake Interface if is not missing self.__attributes__[attr_name] = type( - "{}".format(attr_name), + attr_name, (MissingInteface, ), {} ) From 2b9f4794abb3422daac91b2d6a4cce9c3a008c40 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:14:55 +0200 Subject: [PATCH 27/77] removed unused file --- openpype/modules/modules_import.py | 84 ------------------------------ 1 file changed, 84 deletions(-) delete mode 100644 openpype/modules/modules_import.py diff --git a/openpype/modules/modules_import.py b/openpype/modules/modules_import.py deleted file mode 100644 index 24441c6d20c..00000000000 --- a/openpype/modules/modules_import.py +++ /dev/null @@ -1,84 +0,0 @@ -import sys -import six - - -class __ModuleClass: - __attributes__ = {} - __defaults__ = set() - - def __getattr__(self, attr_name): - return self.__attributes__.get( - attr_name, - type("Missing.{}".format(attr_name), (), {}) - ) - - def __setattr__(self, attr_name, value): - self.__attributes__[attr_name] = value - - def keys(self): - return self.__attributes__.keys() - - def values(self): - return self.__attributes__.values() - - def items(self): - return self.__attributes__.items() - - -def 
_load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) - if full_module_name in sys.modules: - return sys.modules[full_module_name] - - import imp - - dst_module = sys.modules[dst_module_name] - - fp, pathname, description = imp.find_module(module_name, [dirpath]) - module = imp.load_module(full_module_name, fp, pathname, description) - setattr(dst_module, module_name, module) - - return module - - -def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) - if full_module_name in sys.modules: - return sys.modules[full_module_name] - - import importlib.util - from importlib._bootstrap_external import PathFinder - - dst_module = sys.modules[dst_module_name] - loader = PathFinder.find_module(full_module_name, [dirpath]) - - spec = importlib.util.spec_from_loader( - full_module_name, loader, origin=dirpath - ) - - module = importlib.util.module_from_spec(spec) - - if dst_module is not None: - setattr(dst_module, module_name, module) - - sys.modules[full_module_name] = module - - loader.exec_module(module) - - return module - - -def load_module_from_dirpath(dirpath, folder_name, dst_module_name): - if six.PY3: - module = _load_module_from_dirpath_py3( - dirpath, folder_name, dst_module_name - ) - else: - module = _load_module_from_dirpath_py2( - dirpath, folder_name, dst_module_name - ) - return module - - -sys.modules["openpype_modules"] = __ModuleClass() -sys.modules["openpype_interfaces"] = __ModuleClass() From bf1db0f57c4924257dadcbb48026703cb9c4d46c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:05:29 +0200 Subject: [PATCH 28/77] created folder default modules --- openpype/modules/{ => default_modules}/clockify/__init__.py | 0 openpype/modules/{ => default_modules}/clockify/clockify_api.py | 0 .../modules/{ => default_modules}/clockify/clockify_module.py | 0 openpype/modules/{ => 
default_modules}/clockify/constants.py | 0 .../clockify/ftrack/server/action_clockify_sync_server.py | 2 +- .../clockify/ftrack/user/action_clockify_sync_local.py | 2 +- .../clockify/launcher_actions/ClockifyStart.py | 0 .../clockify/launcher_actions/ClockifySync.py | 0 openpype/modules/{ => default_modules}/clockify/widgets.py | 0 9 files changed, 2 insertions(+), 2 deletions(-) rename openpype/modules/{ => default_modules}/clockify/__init__.py (100%) rename openpype/modules/{ => default_modules}/clockify/clockify_api.py (100%) rename openpype/modules/{ => default_modules}/clockify/clockify_module.py (100%) rename openpype/modules/{ => default_modules}/clockify/constants.py (100%) rename openpype/modules/{ => default_modules}/clockify/ftrack/server/action_clockify_sync_server.py (98%) rename openpype/modules/{ => default_modules}/clockify/ftrack/user/action_clockify_sync_local.py (98%) rename openpype/modules/{ => default_modules}/clockify/launcher_actions/ClockifyStart.py (100%) rename openpype/modules/{ => default_modules}/clockify/launcher_actions/ClockifySync.py (100%) rename openpype/modules/{ => default_modules}/clockify/widgets.py (100%) diff --git a/openpype/modules/clockify/__init__.py b/openpype/modules/default_modules/clockify/__init__.py similarity index 100% rename from openpype/modules/clockify/__init__.py rename to openpype/modules/default_modules/clockify/__init__.py diff --git a/openpype/modules/clockify/clockify_api.py b/openpype/modules/default_modules/clockify/clockify_api.py similarity index 100% rename from openpype/modules/clockify/clockify_api.py rename to openpype/modules/default_modules/clockify/clockify_api.py diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/default_modules/clockify/clockify_module.py similarity index 100% rename from openpype/modules/clockify/clockify_module.py rename to openpype/modules/default_modules/clockify/clockify_module.py diff --git a/openpype/modules/clockify/constants.py 
b/openpype/modules/default_modules/clockify/constants.py similarity index 100% rename from openpype/modules/clockify/constants.py rename to openpype/modules/default_modules/clockify/constants.py diff --git a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py similarity index 98% rename from openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py rename to openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py index 495f87dc7e6..8379414c0cf 100644 --- a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -1,6 +1,6 @@ import os import json -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction from openpype.modules.clockify.clockify_api import ClockifyAPI diff --git a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py similarity index 98% rename from openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py rename to openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py index 4f4579a8bff..3d55ee92b6f 100644 --- a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.modules.clockify.clockify_api import ClockifyAPI diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py similarity index 100% rename from 
openpype/modules/clockify/launcher_actions/ClockifyStart.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py similarity index 100% rename from openpype/modules/clockify/launcher_actions/ClockifySync.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py diff --git a/openpype/modules/clockify/widgets.py b/openpype/modules/default_modules/clockify/widgets.py similarity index 100% rename from openpype/modules/clockify/widgets.py rename to openpype/modules/default_modules/clockify/widgets.py From 68b1183d815e4d66867024e0f8b6cf49016c276b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:05:57 +0200 Subject: [PATCH 29/77] moved avalon apps module --- .../modules/{ => default_modules}/avalon_apps/__init__.py | 0 .../modules/{ => default_modules}/avalon_apps/avalon_app.py | 0 .../modules/{ => default_modules}/avalon_apps/rest_api.py | 5 +---- 3 files changed, 1 insertion(+), 4 deletions(-) rename openpype/modules/{ => default_modules}/avalon_apps/__init__.py (100%) rename openpype/modules/{ => default_modules}/avalon_apps/avalon_app.py (100%) rename openpype/modules/{ => default_modules}/avalon_apps/rest_api.py (97%) diff --git a/openpype/modules/avalon_apps/__init__.py b/openpype/modules/default_modules/avalon_apps/__init__.py similarity index 100% rename from openpype/modules/avalon_apps/__init__.py rename to openpype/modules/default_modules/avalon_apps/__init__.py diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/default_modules/avalon_apps/avalon_app.py similarity index 100% rename from openpype/modules/avalon_apps/avalon_app.py rename to openpype/modules/default_modules/avalon_apps/avalon_app.py diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/default_modules/avalon_apps/rest_api.py 
similarity index 97% rename from openpype/modules/avalon_apps/rest_api.py rename to openpype/modules/default_modules/avalon_apps/rest_api.py index b77c256398d..533050fc0c6 100644 --- a/openpype/modules/avalon_apps/rest_api.py +++ b/openpype/modules/default_modules/avalon_apps/rest_api.py @@ -1,16 +1,13 @@ import os -import re import json import datetime -import bson from bson.objectid import ObjectId -import bson.json_util from aiohttp.web_response import Response from avalon.api import AvalonMongoDB -from openpype.modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webserver.base_routes import RestApiEndpoint class _RestApiEndpoint(RestApiEndpoint): From c5798467e23f8551701e33ff14117c85c032f86d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:06:26 +0200 Subject: [PATCH 30/77] moved deadline module --- openpype/modules/{ => default_modules}/deadline/__init__.py | 0 .../modules/{ => default_modules}/deadline/deadline_module.py | 0 .../deadline/plugins/publish/submit_aftereffects_deadline.py | 0 .../deadline/plugins/publish/submit_harmony_deadline.py | 0 .../deadline/plugins/publish/submit_maya_deadline.py | 0 .../deadline/plugins/publish/submit_nuke_deadline.py | 0 .../deadline/plugins/publish/submit_publish_job.py | 0 .../deadline/plugins/publish/validate_deadline_connection.py | 0 .../plugins/publish/validate_expected_and_rendered_files.py | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/deadline/__init__.py (100%) rename openpype/modules/{ => default_modules}/deadline/deadline_module.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_aftereffects_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_harmony_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_maya_deadline.py (100%) rename openpype/modules/{ => 
default_modules}/deadline/plugins/publish/submit_nuke_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_publish_job.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/validate_deadline_connection.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/validate_expected_and_rendered_files.py (100%) diff --git a/openpype/modules/deadline/__init__.py b/openpype/modules/default_modules/deadline/__init__.py similarity index 100% rename from openpype/modules/deadline/__init__.py rename to openpype/modules/default_modules/deadline/__init__.py diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/default_modules/deadline/deadline_module.py similarity index 100% rename from openpype/modules/deadline/deadline_module.py rename to openpype/modules/default_modules/deadline/deadline_module.py diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_maya_deadline.py rename to 
openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_publish_job.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py b/openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_deadline_connection.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py From 7b5ef747e9d1e69f66bcc8e4b775d4f813e600ee Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:07:48 +0200 Subject: [PATCH 31/77] moved webserver module --- openpype/modules/{ => default_modules}/webserver/__init__.py | 0 openpype/modules/{ => default_modules}/webserver/base_routes.py | 0 .../{ => default_modules}/webserver/host_console_listener.py | 0 openpype/modules/{ => 
default_modules}/webserver/interfaces.py | 0 openpype/modules/{ => default_modules}/webserver/server.py | 0 .../modules/{ => default_modules}/webserver/webserver_module.py | 2 +- 6 files changed, 1 insertion(+), 1 deletion(-) rename openpype/modules/{ => default_modules}/webserver/__init__.py (100%) rename openpype/modules/{ => default_modules}/webserver/base_routes.py (100%) rename openpype/modules/{ => default_modules}/webserver/host_console_listener.py (100%) rename openpype/modules/{ => default_modules}/webserver/interfaces.py (100%) rename openpype/modules/{ => default_modules}/webserver/server.py (100%) rename openpype/modules/{ => default_modules}/webserver/webserver_module.py (98%) diff --git a/openpype/modules/webserver/__init__.py b/openpype/modules/default_modules/webserver/__init__.py similarity index 100% rename from openpype/modules/webserver/__init__.py rename to openpype/modules/default_modules/webserver/__init__.py diff --git a/openpype/modules/webserver/base_routes.py b/openpype/modules/default_modules/webserver/base_routes.py similarity index 100% rename from openpype/modules/webserver/base_routes.py rename to openpype/modules/default_modules/webserver/base_routes.py diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/default_modules/webserver/host_console_listener.py similarity index 100% rename from openpype/modules/webserver/host_console_listener.py rename to openpype/modules/default_modules/webserver/host_console_listener.py diff --git a/openpype/modules/webserver/interfaces.py b/openpype/modules/default_modules/webserver/interfaces.py similarity index 100% rename from openpype/modules/webserver/interfaces.py rename to openpype/modules/default_modules/webserver/interfaces.py diff --git a/openpype/modules/webserver/server.py b/openpype/modules/default_modules/webserver/server.py similarity index 100% rename from openpype/modules/webserver/server.py rename to openpype/modules/default_modules/webserver/server.py 
diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py similarity index 98% rename from openpype/modules/webserver/webserver_module.py rename to openpype/modules/default_modules/webserver/webserver_module.py index 192baad013a..f81bf52410e 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -50,7 +50,7 @@ def _add_resources_statics(self): ) def _add_listeners(self): - from openpype.modules.webserver import host_console_listener + from openpype_modules.webserver import host_console_listener self._host_listener = host_console_listener.HostListener( self.server_manager, self From 6291e01003ce072fd37639688c4febcc35dc5397 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:10:34 +0200 Subject: [PATCH 32/77] moved idle manager module --- openpype/modules/{ => default_modules}/idle_manager/__init__.py | 0 .../modules/{ => default_modules}/idle_manager/idle_module.py | 0 .../modules/{ => default_modules}/idle_manager/idle_threads.py | 0 openpype/modules/{ => default_modules}/idle_manager/interfaces.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/idle_manager/__init__.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/idle_module.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/idle_threads.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/interfaces.py (100%) diff --git a/openpype/modules/idle_manager/__init__.py b/openpype/modules/default_modules/idle_manager/__init__.py similarity index 100% rename from openpype/modules/idle_manager/__init__.py rename to openpype/modules/default_modules/idle_manager/__init__.py diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py similarity index 100% rename from 
openpype/modules/idle_manager/idle_module.py rename to openpype/modules/default_modules/idle_manager/idle_module.py diff --git a/openpype/modules/idle_manager/idle_threads.py b/openpype/modules/default_modules/idle_manager/idle_threads.py similarity index 100% rename from openpype/modules/idle_manager/idle_threads.py rename to openpype/modules/default_modules/idle_manager/idle_threads.py diff --git a/openpype/modules/idle_manager/interfaces.py b/openpype/modules/default_modules/idle_manager/interfaces.py similarity index 100% rename from openpype/modules/idle_manager/interfaces.py rename to openpype/modules/default_modules/idle_manager/interfaces.py From 3468a9a58be61109a4a84f0095b410af01bc38e9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:11:15 +0200 Subject: [PATCH 33/77] moved muster module --- openpype/modules/{ => default_modules}/muster/__init__.py | 0 openpype/modules/{ => default_modules}/muster/muster.py | 0 openpype/modules/{ => default_modules}/muster/rest_api.py | 0 openpype/modules/{ => default_modules}/muster/widget_login.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/muster/__init__.py (100%) rename openpype/modules/{ => default_modules}/muster/muster.py (100%) rename openpype/modules/{ => default_modules}/muster/rest_api.py (100%) rename openpype/modules/{ => default_modules}/muster/widget_login.py (100%) diff --git a/openpype/modules/muster/__init__.py b/openpype/modules/default_modules/muster/__init__.py similarity index 100% rename from openpype/modules/muster/__init__.py rename to openpype/modules/default_modules/muster/__init__.py diff --git a/openpype/modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py similarity index 100% rename from openpype/modules/muster/muster.py rename to openpype/modules/default_modules/muster/muster.py diff --git a/openpype/modules/muster/rest_api.py b/openpype/modules/default_modules/muster/rest_api.py similarity 
index 100% rename from openpype/modules/muster/rest_api.py rename to openpype/modules/default_modules/muster/rest_api.py diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/default_modules/muster/widget_login.py similarity index 100% rename from openpype/modules/muster/widget_login.py rename to openpype/modules/default_modules/muster/widget_login.py From fca039fdfae21eb29eb55f0ef7bdc20cd7c67ea5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:11:43 +0200 Subject: [PATCH 34/77] moved settings module --- .../modules/{ => default_modules}/settings_module/__init__.py | 0 .../modules/{ => default_modules}/settings_module/interfaces.py | 0 .../{ => default_modules}/settings_module/settings_action.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/settings_module/__init__.py (100%) rename openpype/modules/{ => default_modules}/settings_module/interfaces.py (100%) rename openpype/modules/{ => default_modules}/settings_module/settings_action.py (100%) diff --git a/openpype/modules/settings_module/__init__.py b/openpype/modules/default_modules/settings_module/__init__.py similarity index 100% rename from openpype/modules/settings_module/__init__.py rename to openpype/modules/default_modules/settings_module/__init__.py diff --git a/openpype/modules/settings_module/interfaces.py b/openpype/modules/default_modules/settings_module/interfaces.py similarity index 100% rename from openpype/modules/settings_module/interfaces.py rename to openpype/modules/default_modules/settings_module/interfaces.py diff --git a/openpype/modules/settings_module/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py similarity index 100% rename from openpype/modules/settings_module/settings_action.py rename to openpype/modules/default_modules/settings_module/settings_action.py From 6fb0d1fece0dc15f630313c8af1e88fc0a163191 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 
2021 14:12:16 +0200 Subject: [PATCH 35/77] moved timers manager module --- openpype/modules/{ => default_modules}/timers_manager/__init__.py | 0 .../modules/{ => default_modules}/timers_manager/interfaces.py | 0 openpype/modules/{ => default_modules}/timers_manager/rest_api.py | 0 .../{ => default_modules}/timers_manager/timers_manager.py | 0 .../{ => default_modules}/timers_manager/widget_user_idle.py | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/timers_manager/__init__.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/interfaces.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/rest_api.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/timers_manager.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/widget_user_idle.py (100%) diff --git a/openpype/modules/timers_manager/__init__.py b/openpype/modules/default_modules/timers_manager/__init__.py similarity index 100% rename from openpype/modules/timers_manager/__init__.py rename to openpype/modules/default_modules/timers_manager/__init__.py diff --git a/openpype/modules/timers_manager/interfaces.py b/openpype/modules/default_modules/timers_manager/interfaces.py similarity index 100% rename from openpype/modules/timers_manager/interfaces.py rename to openpype/modules/default_modules/timers_manager/interfaces.py diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/default_modules/timers_manager/rest_api.py similarity index 100% rename from openpype/modules/timers_manager/rest_api.py rename to openpype/modules/default_modules/timers_manager/rest_api.py diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py similarity index 100% rename from openpype/modules/timers_manager/timers_manager.py rename to openpype/modules/default_modules/timers_manager/timers_manager.py diff --git 
a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/default_modules/timers_manager/widget_user_idle.py similarity index 100% rename from openpype/modules/timers_manager/widget_user_idle.py rename to openpype/modules/default_modules/timers_manager/widget_user_idle.py From 224273c1f11ef1749a8d4d780756d54323de822d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:16 +0200 Subject: [PATCH 36/77] moved sync server module --- .../{ => default_modules}/sync_server/README.md | 0 .../{ => default_modules}/sync_server/__init__.py | 0 .../sync_server/providers/__init__.py | 0 .../sync_server/providers/abstract_provider.py | 0 .../sync_server/providers/gdrive.py | 0 .../sync_server/providers/lib.py | 0 .../sync_server/providers/local_drive.py | 0 .../sync_server/providers/resources/folder.png | Bin .../sync_server/providers/resources/gdrive.png | Bin .../sync_server/providers/resources/local_drive.png | Bin .../sync_server/providers/resources/studio.png | Bin .../sync_server/resources/paused.png | Bin .../sync_server/resources/synced.png | Bin .../sync_server/sync_server.py | 0 .../sync_server/sync_server_module.py | 0 .../{ => default_modules}/sync_server/tray/app.py | 0 .../sync_server/tray/delegates.py | 0 .../{ => default_modules}/sync_server/tray/lib.py | 0 .../sync_server/tray/models.py | 0 .../sync_server/tray/widgets.py | 0 .../{ => default_modules}/sync_server/utils.py | 0 21 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/sync_server/README.md (100%) rename openpype/modules/{ => default_modules}/sync_server/__init__.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/__init__.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/abstract_provider.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/gdrive.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/lib.py (100%) rename 
openpype/modules/{ => default_modules}/sync_server/providers/local_drive.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/folder.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/gdrive.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/local_drive.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/studio.png (100%) rename openpype/modules/{ => default_modules}/sync_server/resources/paused.png (100%) rename openpype/modules/{ => default_modules}/sync_server/resources/synced.png (100%) rename openpype/modules/{ => default_modules}/sync_server/sync_server.py (100%) rename openpype/modules/{ => default_modules}/sync_server/sync_server_module.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/app.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/delegates.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/lib.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/models.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/widgets.py (100%) rename openpype/modules/{ => default_modules}/sync_server/utils.py (100%) diff --git a/openpype/modules/sync_server/README.md b/openpype/modules/default_modules/sync_server/README.md similarity index 100% rename from openpype/modules/sync_server/README.md rename to openpype/modules/default_modules/sync_server/README.md diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/default_modules/sync_server/__init__.py similarity index 100% rename from openpype/modules/sync_server/__init__.py rename to openpype/modules/default_modules/sync_server/__init__.py diff --git a/openpype/modules/sync_server/providers/__init__.py b/openpype/modules/default_modules/sync_server/providers/__init__.py similarity index 100% rename from 
openpype/modules/sync_server/providers/__init__.py rename to openpype/modules/default_modules/sync_server/providers/__init__.py diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/default_modules/sync_server/providers/abstract_provider.py similarity index 100% rename from openpype/modules/sync_server/providers/abstract_provider.py rename to openpype/modules/default_modules/sync_server/providers/abstract_provider.py diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/default_modules/sync_server/providers/gdrive.py similarity index 100% rename from openpype/modules/sync_server/providers/gdrive.py rename to openpype/modules/default_modules/sync_server/providers/gdrive.py diff --git a/openpype/modules/sync_server/providers/lib.py b/openpype/modules/default_modules/sync_server/providers/lib.py similarity index 100% rename from openpype/modules/sync_server/providers/lib.py rename to openpype/modules/default_modules/sync_server/providers/lib.py diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/default_modules/sync_server/providers/local_drive.py similarity index 100% rename from openpype/modules/sync_server/providers/local_drive.py rename to openpype/modules/default_modules/sync_server/providers/local_drive.py diff --git a/openpype/modules/sync_server/providers/resources/folder.png b/openpype/modules/default_modules/sync_server/providers/resources/folder.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/folder.png rename to openpype/modules/default_modules/sync_server/providers/resources/folder.png diff --git a/openpype/modules/sync_server/providers/resources/gdrive.png b/openpype/modules/default_modules/sync_server/providers/resources/gdrive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/gdrive.png rename to openpype/modules/default_modules/sync_server/providers/resources/gdrive.png diff 
--git a/openpype/modules/sync_server/providers/resources/local_drive.png b/openpype/modules/default_modules/sync_server/providers/resources/local_drive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/local_drive.png rename to openpype/modules/default_modules/sync_server/providers/resources/local_drive.png diff --git a/openpype/modules/sync_server/providers/resources/studio.png b/openpype/modules/default_modules/sync_server/providers/resources/studio.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/studio.png rename to openpype/modules/default_modules/sync_server/providers/resources/studio.png diff --git a/openpype/modules/sync_server/resources/paused.png b/openpype/modules/default_modules/sync_server/resources/paused.png similarity index 100% rename from openpype/modules/sync_server/resources/paused.png rename to openpype/modules/default_modules/sync_server/resources/paused.png diff --git a/openpype/modules/sync_server/resources/synced.png b/openpype/modules/default_modules/sync_server/resources/synced.png similarity index 100% rename from openpype/modules/sync_server/resources/synced.png rename to openpype/modules/default_modules/sync_server/resources/synced.png diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/default_modules/sync_server/sync_server.py similarity index 100% rename from openpype/modules/sync_server/sync_server.py rename to openpype/modules/default_modules/sync_server/sync_server.py diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py similarity index 100% rename from openpype/modules/sync_server/sync_server_module.py rename to openpype/modules/default_modules/sync_server/sync_server_module.py diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/default_modules/sync_server/tray/app.py similarity index 100% rename from 
openpype/modules/sync_server/tray/app.py rename to openpype/modules/default_modules/sync_server/tray/app.py diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/default_modules/sync_server/tray/delegates.py similarity index 100% rename from openpype/modules/sync_server/tray/delegates.py rename to openpype/modules/default_modules/sync_server/tray/delegates.py diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/default_modules/sync_server/tray/lib.py similarity index 100% rename from openpype/modules/sync_server/tray/lib.py rename to openpype/modules/default_modules/sync_server/tray/lib.py diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/default_modules/sync_server/tray/models.py similarity index 100% rename from openpype/modules/sync_server/tray/models.py rename to openpype/modules/default_modules/sync_server/tray/models.py diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/default_modules/sync_server/tray/widgets.py similarity index 100% rename from openpype/modules/sync_server/tray/widgets.py rename to openpype/modules/default_modules/sync_server/tray/widgets.py diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/default_modules/sync_server/utils.py similarity index 100% rename from openpype/modules/sync_server/utils.py rename to openpype/modules/default_modules/sync_server/utils.py From a2887d9023eb912680b9d74cfdb898b2752a2684 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:31 +0200 Subject: [PATCH 37/77] moved standalone publish action --- .../modules/{ => default_modules}/standalonepublish_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/standalonepublish_action.py (100%) diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/default_modules/standalonepublish_action.py similarity index 100% rename from 
openpype/modules/standalonepublish_action.py rename to openpype/modules/default_modules/standalonepublish_action.py From 8f79bac4234bbcba16578a596aa1a571e8f149ea Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:54 +0200 Subject: [PATCH 38/77] moved log viewer module --- openpype/modules/{ => default_modules}/log_viewer/__init__.py | 0 .../modules/{ => default_modules}/log_viewer/log_view_module.py | 0 .../modules/{ => default_modules}/log_viewer/tray/__init__.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/app.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/models.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/widgets.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/log_viewer/__init__.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/log_view_module.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/__init__.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/app.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/models.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/widgets.py (100%) diff --git a/openpype/modules/log_viewer/__init__.py b/openpype/modules/default_modules/log_viewer/__init__.py similarity index 100% rename from openpype/modules/log_viewer/__init__.py rename to openpype/modules/default_modules/log_viewer/__init__.py diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py similarity index 100% rename from openpype/modules/log_viewer/log_view_module.py rename to openpype/modules/default_modules/log_viewer/log_view_module.py diff --git a/openpype/modules/log_viewer/tray/__init__.py b/openpype/modules/default_modules/log_viewer/tray/__init__.py similarity index 100% rename from openpype/modules/log_viewer/tray/__init__.py rename to 
openpype/modules/default_modules/log_viewer/tray/__init__.py diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/default_modules/log_viewer/tray/app.py similarity index 100% rename from openpype/modules/log_viewer/tray/app.py rename to openpype/modules/default_modules/log_viewer/tray/app.py diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/default_modules/log_viewer/tray/models.py similarity index 100% rename from openpype/modules/log_viewer/tray/models.py rename to openpype/modules/default_modules/log_viewer/tray/models.py diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/default_modules/log_viewer/tray/widgets.py similarity index 100% rename from openpype/modules/log_viewer/tray/widgets.py rename to openpype/modules/default_modules/log_viewer/tray/widgets.py From e42b03c0c48193edd52a5a013c8fb98a8d1b33ef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:14:12 +0200 Subject: [PATCH 39/77] moved project manager action --- openpype/modules/{ => default_modules}/project_manager_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/project_manager_action.py (100%) diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py similarity index 100% rename from openpype/modules/project_manager_action.py rename to openpype/modules/default_modules/project_manager_action.py From 3259929658bd39601a8c4ab77554db7bc9b4e936 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:15:42 +0200 Subject: [PATCH 40/77] moved launcher action module --- openpype/modules/{ => default_modules}/launcher_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/launcher_action.py (100%) diff --git a/openpype/modules/launcher_action.py b/openpype/modules/default_modules/launcher_action.py similarity index 100% rename from 
openpype/modules/launcher_action.py rename to openpype/modules/default_modules/launcher_action.py From f25e242df9305ed3191ae82ef3e3396de17a766e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:16:06 +0200 Subject: [PATCH 41/77] moved default interfaces --- openpype/modules/{ => default_modules}/interfaces.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/interfaces.py (100%) diff --git a/openpype/modules/interfaces.py b/openpype/modules/default_modules/interfaces.py similarity index 100% rename from openpype/modules/interfaces.py rename to openpype/modules/default_modules/interfaces.py From df5434e895370fcee4d0b1c60dce8bceac5aa57a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:16:39 +0200 Subject: [PATCH 42/77] moved slack module --- .../modules/{ => default_modules}/slack/README.md | 0 .../modules/{ => default_modules}/slack/__init__.py | 0 .../slack/launch_hooks/pre_python2_vendor.py | 0 .../{ => default_modules}/slack/manifest.yml | 0 .../slack/plugins/publish/collect_slack_family.py | 0 .../slack/plugins/publish/integrate_slack_api.py | 0 .../python2_vendor/python-slack-sdk-1/.appveyor.yml | 0 .../python2_vendor/python-slack-sdk-1/.coveragerc | 0 .../slack/python2_vendor/python-slack-sdk-1/.flake8 | 0 .../python-slack-sdk-1/.github/contributing.md | 0 .../python-slack-sdk-1/.github/issue_template.md | 0 .../python-slack-sdk-1/.github/maintainers_guide.md | 0 .../.github/pull_request_template.md | 0 .../python2_vendor/python-slack-sdk-1/.gitignore | 0 .../python2_vendor/python-slack-sdk-1/.travis.yml | 0 .../slack/python2_vendor/python-slack-sdk-1/LICENSE | 0 .../python2_vendor/python-slack-sdk-1/MANIFEST.in | 0 .../python2_vendor/python-slack-sdk-1/README.rst | 0 .../python-slack-sdk-1/docs-src/.gitignore | 0 .../python-slack-sdk-1/docs-src/Makefile | 0 .../docs-src/_themes/slack/conf.py | 0 .../docs-src/_themes/slack/layout.html | 0 .../docs-src/_themes/slack/localtoc.html 
| 0 .../docs-src/_themes/slack/relations.html | 0 .../docs-src/_themes/slack/sidebar.html | 0 .../docs-src/_themes/slack/static/default.css_t | 0 .../docs-src/_themes/slack/static/docs.css_t | 0 .../docs-src/_themes/slack/static/pygments.css_t | 0 .../docs-src/_themes/slack/theme.conf | 0 .../python-slack-sdk-1/docs-src/about.rst | 0 .../python-slack-sdk-1/docs-src/auth.rst | 0 .../python-slack-sdk-1/docs-src/basic_usage.rst | 0 .../python-slack-sdk-1/docs-src/changelog.rst | 0 .../python-slack-sdk-1/docs-src/conf.py | 0 .../python-slack-sdk-1/docs-src/conversations.rst | 0 .../python-slack-sdk-1/docs-src/faq.rst | 0 .../python-slack-sdk-1/docs-src/index.rst | 0 .../python-slack-sdk-1/docs-src/make.bat | 0 .../python-slack-sdk-1/docs-src/metadata.rst | 0 .../docs-src/real_time_messaging.rst | 0 .../slack/python2_vendor/python-slack-sdk-1/docs.sh | 0 .../python-slack-sdk-1/docs/.buildinfo | 0 .../python-slack-sdk-1/docs/.nojekyll | 0 .../python-slack-sdk-1/docs/_static/ajax-loader.gif | Bin .../python-slack-sdk-1/docs/_static/basic.css | 0 .../python-slack-sdk-1/docs/_static/classic.css | 0 .../docs/_static/comment-bright.png | Bin .../docs/_static/comment-close.png | Bin .../python-slack-sdk-1/docs/_static/comment.png | Bin .../python-slack-sdk-1/docs/_static/default.css | 0 .../python-slack-sdk-1/docs/_static/docs.css | 0 .../python-slack-sdk-1/docs/_static/doctools.js | 0 .../docs/_static/documentation_options.js | 0 .../docs/_static/down-pressed.png | Bin .../python-slack-sdk-1/docs/_static/down.png | Bin .../python-slack-sdk-1/docs/_static/file.png | Bin .../python-slack-sdk-1/docs/_static/jquery-3.2.1.js | 0 .../python-slack-sdk-1/docs/_static/jquery.js | 0 .../docs/_static/language_data.js | 0 .../python-slack-sdk-1/docs/_static/minus.png | Bin .../python-slack-sdk-1/docs/_static/plus.png | Bin .../python-slack-sdk-1/docs/_static/pygments.css | 0 .../python-slack-sdk-1/docs/_static/searchtools.js | 0 .../python-slack-sdk-1/docs/_static/sidebar.js | 0 
.../docs/_static/underscore-1.3.1.js | 0 .../python-slack-sdk-1/docs/_static/underscore.js | 0 .../python-slack-sdk-1/docs/_static/up-pressed.png | Bin .../python-slack-sdk-1/docs/_static/up.png | Bin .../python-slack-sdk-1/docs/_static/websupport.js | 0 .../python-slack-sdk-1/docs/about.html | 0 .../python-slack-sdk-1/docs/auth.html | 0 .../python-slack-sdk-1/docs/basic_usage.html | 0 .../python-slack-sdk-1/docs/changelog.html | 0 .../python-slack-sdk-1/docs/conversations.html | 0 .../python2_vendor/python-slack-sdk-1/docs/faq.html | 0 .../python-slack-sdk-1/docs/genindex.html | 0 .../python-slack-sdk-1/docs/index.html | 0 .../python-slack-sdk-1/docs/metadata.html | 0 .../python-slack-sdk-1/docs/objects.inv | 0 .../docs/real_time_messaging.html | 0 .../python-slack-sdk-1/docs/search.html | 0 .../python-slack-sdk-1/docs/searchindex.js | 0 .../python-slack-sdk-1/requirements.txt | 0 .../python2_vendor/python-slack-sdk-1/setup.cfg | 0 .../python2_vendor/python-slack-sdk-1/setup.py | 0 .../python-slack-sdk-1/slackclient/__init__.py | 0 .../python-slack-sdk-1/slackclient/channel.py | 0 .../python-slack-sdk-1/slackclient/client.py | 0 .../python-slack-sdk-1/slackclient/exceptions.py | 0 .../python-slack-sdk-1/slackclient/im.py | 0 .../python-slack-sdk-1/slackclient/server.py | 0 .../python-slack-sdk-1/slackclient/slackrequest.py | 0 .../python-slack-sdk-1/slackclient/user.py | 0 .../python-slack-sdk-1/slackclient/util.py | 0 .../python-slack-sdk-1/slackclient/version.py | 0 .../python-slack-sdk-1/test_requirements.txt | 0 .../python-slack-sdk-1/tests/conftest.py | 0 .../tests/data/channel.created.json | 0 .../python-slack-sdk-1/tests/data/im.created.json | 0 .../python-slack-sdk-1/tests/data/rtm.start.json | 0 .../python-slack-sdk-1/tests/data/slack_logo.png | Bin .../python-slack-sdk-1/tests/test_channel.py | 0 .../python-slack-sdk-1/tests/test_server.py | 0 .../python-slack-sdk-1/tests/test_slackclient.py | 0 .../python-slack-sdk-1/tests/test_slackrequest.py | 0 
.../slack/python2_vendor/python-slack-sdk-1/tox.ini | 0 .../{ => default_modules}/slack/slack_module.py | 0 107 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/slack/README.md (100%) rename openpype/modules/{ => default_modules}/slack/__init__.py (100%) rename openpype/modules/{ => default_modules}/slack/launch_hooks/pre_python2_vendor.py (100%) rename openpype/modules/{ => default_modules}/slack/manifest.yml (100%) rename openpype/modules/{ => default_modules}/slack/plugins/publish/collect_slack_family.py (100%) rename openpype/modules/{ => default_modules}/slack/plugins/publish/integrate_slack_api.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.coveragerc (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.flake8 (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.gitignore (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.travis.yml (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/LICENSE (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/README.rst (100%) rename 
openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs.sh (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png 
(100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/about.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/auth.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/faq.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/index.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/search.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/requirements.txt (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/setup.cfg (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/setup.py (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tox.ini (100%) rename openpype/modules/{ => default_modules}/slack/slack_module.py (100%) diff --git a/openpype/modules/slack/README.md b/openpype/modules/default_modules/slack/README.md similarity index 100% rename from openpype/modules/slack/README.md rename to openpype/modules/default_modules/slack/README.md diff --git a/openpype/modules/slack/__init__.py b/openpype/modules/default_modules/slack/__init__.py similarity index 100% rename from openpype/modules/slack/__init__.py rename to openpype/modules/default_modules/slack/__init__.py diff --git a/openpype/modules/slack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py similarity index 100% rename from openpype/modules/slack/launch_hooks/pre_python2_vendor.py rename to openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py diff --git a/openpype/modules/slack/manifest.yml b/openpype/modules/default_modules/slack/manifest.yml similarity index 100% rename from openpype/modules/slack/manifest.yml rename to openpype/modules/default_modules/slack/manifest.yml diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py similarity index 100% rename from openpype/modules/slack/plugins/publish/collect_slack_family.py rename to openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py 
b/openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py similarity index 100% rename from openpype/modules/slack/plugins/publish/integrate_slack_api.py rename to openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md similarity index 100% rename 
from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE similarity index 100% 
rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css 
diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html similarity index 100% 
rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png rename 
to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py similarity index 100% rename from openpype/modules/slack/slack_module.py rename to openpype/modules/default_modules/slack/slack_module.py From 3406c47fda3efa19f4f312cf412df99495c8803d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:19:35 +0200 Subject: [PATCH 43/77] moved ftrack module --- .../{ => default_modules}/ftrack/__init__.py | 0 .../action_clone_review_session.py | 2 +- .../action_multiple_notes.py | 2 +- .../action_prepare_project.py | 2 +- .../action_push_frame_values_to_task.py | 2 +- .../action_sync_to_avalon.py | 4 +- .../event_del_avalon_id_from_new.py | 6 +- .../event_first_version_status.py | 2 +- .../event_next_task_update.py | 2 +- .../event_push_frame_values_to_task.py | 2 +- .../event_radio_buttons.py | 2 +- .../event_sync_to_avalon.py | 2 +- .../event_task_to_parent_status.py | 2 +- .../event_task_to_version_status.py | 2 +- .../event_thumbnail_updates.py | 2 +- .../event_user_assigment.py | 4 +- .../event_version_to_task_statuses.py | 2 +- .../action_applications.py | 2 +- .../action_batch_task_creation.py | 2 +- .../action_clean_hierarchical_attributes.py | 2 +- .../action_client_review_sort.py | 2 +- .../action_component_open.py | 2 +- .../action_create_cust_attrs.py | 2 +- .../action_create_folders.py | 2 +- .../action_create_project_structure.py | 2 +- .../action_delete_asset.py | 2 +- .../action_delete_old_versions.py | 2 +- .../event_handlers_user/action_delivery.py | 4 +- .../event_handlers_user/action_djvview.py | 2 +- .../event_handlers_user/action_job_killer.py | 2 +- .../action_multiple_notes.py | 2 +- .../action_prepare_project.py | 2 +- .../ftrack/event_handlers_user/action_rv.py | 2 +- .../ftrack/event_handlers_user/action_seed.py | 2 +- 
.../action_store_thumbnails_to_avalon.py | 4 +- .../action_sync_to_avalon.py | 4 +- .../ftrack/event_handlers_user/action_test.py | 2 +- .../action_thumbnail_to_childern.py | 2 +- .../action_thumbnail_to_parent.py | 2 +- .../action_where_run_ask.py | 2 +- .../action_where_run_show.py | 2 +- .../ftrack/ftrack_module.py | 2 +- .../ftrack/ftrack_server/__init__.py | 0 .../ftrack/ftrack_server/event_server_cli.py | 8 +- .../ftrack/ftrack_server/ftrack_server.py | 0 .../ftrack/ftrack_server/lib.py | 2 +- .../ftrack/ftrack_server/socket_thread.py | 0 .../ftrack/interfaces.py | 0 .../launch_hooks/post_ftrack_changes.py | 0 .../ftrack/launch_hooks/pre_python2_vendor.py | 2 +- .../ftrack/lib/__init__.py | 0 .../ftrack/lib/avalon_sync.py | 0 .../ftrack/lib/constants.py | 0 .../ftrack/lib/credentials.py | 0 .../ftrack/lib/custom_attributes.json | 0 .../ftrack/lib/custom_attributes.py | 0 .../ftrack/lib/ftrack_action_handler.py | 0 .../ftrack/lib/ftrack_base_handler.py | 2 +- .../ftrack/lib/ftrack_event_handler.py | 0 .../ftrack/lib/settings.py | 0 .../integrate_ftrack_comments.py | 0 .../plugins/publish/collect_ftrack_api.py | 0 .../plugins/publish/collect_ftrack_family.py | 0 .../plugins/publish/integrate_ftrack_api.py | 0 .../integrate_ftrack_component_overwrite.py | 0 .../publish/integrate_ftrack_instances.py | 0 .../plugins/publish/integrate_ftrack_note.py | 0 .../publish/integrate_hierarchy_ftrack.py | 2 +- .../publish/integrate_remove_components.py | 0 .../validate_custom_ftrack_attributes.py | 0 .../python2_vendor/arrow/.github/FUNDING.yml | 1 + .../.github/ISSUE_TEMPLATE/bug_report.md | 27 + .../.github/ISSUE_TEMPLATE/documentation.md | 17 + .../.github/ISSUE_TEMPLATE/feature_request.md | 17 + .../arrow/.github/pull_request_template.md | 22 + .../workflows/continuous_integration.yml | 123 + .../ftrack/python2_vendor/arrow/.gitignore | 211 + .../arrow/.pre-commit-config.yaml | 41 + .../ftrack/python2_vendor/arrow/CHANGELOG.rst | 598 +++ 
.../ftrack/python2_vendor/arrow/LICENSE | 201 + .../ftrack/python2_vendor/arrow/MANIFEST.in | 3 + .../ftrack/python2_vendor/arrow/Makefile | 44 + .../ftrack/python2_vendor/arrow/README.rst | 133 + .../python2_vendor/arrow/arrow/__init__.py | 18 + .../python2_vendor/arrow/arrow/_version.py | 1 + .../ftrack/python2_vendor/arrow/arrow/api.py | 54 + .../python2_vendor/arrow/arrow/arrow.py | 1584 ++++++ .../python2_vendor/arrow/arrow/constants.py | 9 + .../python2_vendor/arrow/arrow/factory.py | 301 ++ .../python2_vendor/arrow/arrow/formatter.py | 139 + .../python2_vendor/arrow/arrow/locales.py | 4267 +++++++++++++++++ .../python2_vendor/arrow/arrow/parser.py | 596 +++ .../ftrack/python2_vendor/arrow/arrow/util.py | 115 + .../ftrack/python2_vendor/arrow/docs/Makefile | 20 + .../ftrack/python2_vendor/arrow/docs/conf.py | 62 + .../python2_vendor/arrow/docs/index.rst | 566 +++ .../ftrack/python2_vendor/arrow/docs/make.bat | 35 + .../python2_vendor/arrow/docs/releases.rst | 3 + .../python2_vendor/arrow/requirements.txt | 14 + .../ftrack/python2_vendor/arrow/setup.cfg | 2 + .../ftrack/python2_vendor/arrow/setup.py | 50 + .../python2_vendor/arrow/tests/__init__.py | 0 .../python2_vendor/arrow/tests/conftest.py | 76 + .../python2_vendor/arrow/tests/test_api.py | 28 + .../python2_vendor/arrow/tests/test_arrow.py | 2150 +++++++++ .../arrow/tests/test_factory.py | 390 ++ .../arrow/tests/test_formatter.py | 282 ++ .../arrow/tests/test_locales.py | 1352 ++++++ .../python2_vendor/arrow/tests/test_parser.py | 1657 +++++++ .../python2_vendor/arrow/tests/test_util.py | 81 + .../python2_vendor/arrow/tests/utils.py | 16 + .../ftrack/python2_vendor/arrow/tox.ini | 53 + .../backports/__init__.py | 0 .../backports/configparser/__init__.py | 0 .../backports/configparser/helpers.py | 0 .../backports/functools_lru_cache.py | 0 .../builtins/builtins/__init__.py | 0 .../ftrack-python-api/.gitignore | 42 + .../ftrack-python-api/LICENSE.python | 254 + .../ftrack-python-api/LICENSE.txt | 176 + 
.../ftrack-python-api/MANIFEST.in | 4 + .../ftrack-python-api/README.rst | 34 + .../ftrack-python-api/bitbucket-pipelines.yml | 24 + .../ftrack-python-api/doc/_static/ftrack.css | 16 + .../doc/api_reference/accessor/base.rst | 8 + .../doc/api_reference/accessor/disk.rst | 8 + .../doc/api_reference/accessor/index.rst | 14 + .../doc/api_reference/accessor/server.rst | 8 + .../doc/api_reference/attribute.rst | 8 + .../doc/api_reference/cache.rst | 8 + .../doc/api_reference/collection.rst | 8 + .../api_reference/entity/asset_version.rst | 8 + .../doc/api_reference/entity/base.rst | 8 + .../doc/api_reference/entity/component.rst | 8 + .../doc/api_reference/entity/factory.rst | 8 + .../doc/api_reference/entity/index.rst | 14 + .../doc/api_reference/entity/job.rst | 8 + .../doc/api_reference/entity/location.rst | 8 + .../doc/api_reference/entity/note.rst | 8 + .../api_reference/entity/project_schema.rst | 8 + .../doc/api_reference/entity/user.rst | 8 + .../doc/api_reference/event/base.rst | 8 + .../doc/api_reference/event/expression.rst | 8 + .../doc/api_reference/event/hub.rst | 8 + .../doc/api_reference/event/index.rst | 14 + .../doc/api_reference/event/subscriber.rst | 8 + .../doc/api_reference/event/subscription.rst | 8 + .../doc/api_reference/exception.rst | 8 + .../doc/api_reference/formatter.rst | 8 + .../doc/api_reference/index.rst | 20 + .../doc/api_reference/inspection.rst | 8 + .../doc/api_reference/logging.rst | 8 + .../doc/api_reference/operation.rst | 8 + .../doc/api_reference/plugin.rst | 8 + .../doc/api_reference/query.rst | 8 + .../resource_identifier_transformer/base.rst | 10 + .../resource_identifier_transformer/index.rst | 16 + .../doc/api_reference/session.rst | 8 + .../doc/api_reference/structure/base.rst | 8 + .../doc/api_reference/structure/id.rst | 8 + .../doc/api_reference/structure/index.rst | 14 + .../doc/api_reference/structure/origin.rst | 8 + .../doc/api_reference/structure/standard.rst | 8 + .../doc/api_reference/symbol.rst | 8 + 
.../ftrack-python-api/doc/caching.rst | 175 + .../ftrack-python-api/doc/conf.py | 102 + .../ftrack-python-api/doc/docutils.conf | 2 + .../doc/environment_variables.rst | 56 + .../ftrack-python-api/doc/event_list.rst | 137 + .../example/assignments_and_allocations.rst | 82 + .../doc/example/component.rst | 23 + .../doc/example/custom_attribute.rst | 94 + .../doc/example/encode_media.rst | 53 + .../doc/example/entity_links.rst | 56 + .../ftrack-python-api/doc/example/index.rst | 52 + .../doc/example/invite_user.rst | 31 + .../ftrack-python-api/doc/example/job.rst | 97 + .../doc/example/link_attribute.rst | 55 + .../ftrack-python-api/doc/example/list.rst | 46 + .../manage_custom_attribute_configuration.rst | 320 ++ .../doc/example/metadata.rst | 43 + .../ftrack-python-api/doc/example/note.rst | 169 + .../ftrack-python-api/doc/example/project.rst | 65 + .../doc/example/publishing.rst | 73 + .../doc/example/review_session.rst | 87 + .../ftrack-python-api/doc/example/scope.rst | 27 + .../doc/example/security_roles.rst | 73 + .../doc/example/sync_ldap_users.rst | 30 + .../doc/example/task_template.rst | 56 + .../doc/example/thumbnail.rst | 71 + .../ftrack-python-api/doc/example/timer.rst | 37 + .../doc/example/web_review.rst | 78 + .../ftrack-python-api/doc/glossary.rst | 76 + .../ftrack-python-api/doc/handling_events.rst | 315 ++ .../image/configuring_plugins_directory.png | Bin 0 -> 7313 bytes .../ftrack-python-api/doc/index.rst | 42 + .../ftrack-python-api/doc/installing.rst | 77 + .../ftrack-python-api/doc/introduction.rst | 26 + .../doc/locations/configuring.rst | 87 + .../ftrack-python-api/doc/locations/index.rst | 18 + .../doc/locations/overview.rst | 143 + .../doc/locations/tutorial.rst | 193 + .../ftrack-python-api/doc/querying.rst | 263 + .../ftrack-python-api/doc/release/index.rst | 18 + .../doc/release/migrating_from_old_api.rst | 613 +++ .../doc/release/migration.rst | 98 + .../doc/release/release_notes.rst | 1478 ++++++ .../doc/resource/example_plugin.py | 
24 + .../doc/resource/example_plugin_safe.py | 0 .../resource/example_plugin_using_session.py | 37 + .../doc/security_and_authentication.rst | 38 + .../ftrack-python-api/doc/tutorial.rst | 156 + .../doc/understanding_sessions.rst | 281 ++ .../doc/working_with_entities.rst | 434 ++ .../ftrack-python-api/pytest.ini | 7 + .../resource/plugin/configure_locations.py | 39 + .../resource/plugin/construct_entity_type.py | 46 + .../ftrack-python-api/setup.cfg | 6 + .../python2_vendor/ftrack-python-api/setup.py | 81 + .../ftrack-python-api/source/__init__.py | 1 + .../source/ftrack_api/__init__.py | 32 + .../_centralized_storage_scenario.py | 656 +++ .../source/ftrack_api/_python_ntpath.py | 534 +++ .../source/ftrack_api/_version.py | 1 + .../source/ftrack_api/_weakref.py | 66 + .../source/ftrack_api/accessor/__init__.py | 2 + .../source/ftrack_api/accessor/base.py | 124 + .../source/ftrack_api/accessor/disk.py | 250 + .../source/ftrack_api/accessor/server.py | 240 + .../source/ftrack_api/attribute.py | 707 +++ .../source/ftrack_api/cache.py | 579 +++ .../source/ftrack_api/collection.py | 507 ++ .../source/ftrack_api/data.py | 119 + .../source/ftrack_api/entity/__init__.py | 2 + .../source/ftrack_api/entity/asset_version.py | 91 + .../source/ftrack_api/entity/base.py | 402 ++ .../source/ftrack_api/entity/component.py | 74 + .../source/ftrack_api/entity/factory.py | 435 ++ .../source/ftrack_api/entity/job.py | 48 + .../source/ftrack_api/entity/location.py | 733 +++ .../source/ftrack_api/entity/note.py | 105 + .../ftrack_api/entity/project_schema.py | 94 + .../source/ftrack_api/entity/user.py | 123 + .../source/ftrack_api/event/__init__.py | 2 + .../source/ftrack_api/event/base.py | 85 + .../source/ftrack_api/event/expression.py | 282 ++ .../source/ftrack_api/event/hub.py | 1091 +++++ .../source/ftrack_api/event/subscriber.py | 27 + .../source/ftrack_api/event/subscription.py | 23 + .../source/ftrack_api/exception.py | 392 ++ .../source/ftrack_api/formatter.py | 131 + 
.../source/ftrack_api/inspection.py | 135 + .../source/ftrack_api/logging.py | 43 + .../source/ftrack_api/operation.py | 115 + .../source/ftrack_api/plugin.py | 121 + .../source/ftrack_api/query.py | 202 + .../__init__.py | 2 + .../resource_identifier_transformer/base.py | 50 + .../source/ftrack_api/session.py | 2515 ++++++++++ .../source/ftrack_api/structure/__init__.py | 2 + .../source/ftrack_api/structure/base.py | 38 + .../source/ftrack_api/structure/entity_id.py | 12 + .../source/ftrack_api/structure/id.py | 91 + .../source/ftrack_api/structure/origin.py | 28 + .../source/ftrack_api/structure/standard.py | 217 + .../source/ftrack_api/symbol.py | 77 + .../test/fixture/media/colour_wheel.mov | Bin 0 -> 17627 bytes .../test/fixture/media/image-resized-10.png | Bin 0 -> 115 bytes .../test/fixture/media/image.png | Bin 0 -> 883 bytes .../fixture/plugin/configure_locations.py | 40 + .../fixture/plugin/construct_entity_type.py | 52 + .../fixture/plugin/count_session_event.py | 41 + .../ftrack-python-api/test/unit/__init__.py | 2 + .../test/unit/accessor/__init__.py | 2 + .../test/unit/accessor/test_disk.py | 267 ++ .../test/unit/accessor/test_server.py | 41 + .../ftrack-python-api/test/unit/conftest.py | 539 +++ .../test/unit/entity/__init__.py | 2 + .../test/unit/entity/test_asset_version.py | 54 + .../test/unit/entity/test_base.py | 14 + .../test/unit/entity/test_component.py | 70 + .../test/unit/entity/test_factory.py | 25 + .../test/unit/entity/test_job.py | 42 + .../test/unit/entity/test_location.py | 516 ++ .../test/unit/entity/test_metadata.py | 135 + .../test/unit/entity/test_note.py | 67 + .../test/unit/entity/test_project_schema.py | 64 + .../test/unit/entity/test_scopes.py | 24 + .../test/unit/entity/test_user.py | 49 + .../test/unit/event/__init__.py | 2 + .../unit/event/event_hub_server_heartbeat.py | 92 + .../test/unit/event/test_base.py | 36 + .../test/unit/event/test_expression.py | 174 + .../test/unit/event/test_hub.py | 701 +++ 
.../test/unit/event/test_subscriber.py | 33 + .../test/unit/event/test_subscription.py | 28 + .../__init__.py | 2 + .../test_base.py | 36 + .../test/unit/structure/__init__.py | 2 + .../test/unit/structure/test_base.py | 31 + .../test/unit/structure/test_entity_id.py | 49 + .../test/unit/structure/test_id.py | 115 + .../test/unit/structure/test_origin.py | 33 + .../test/unit/structure/test_standard.py | 309 ++ .../test/unit/test_attribute.py | 146 + .../ftrack-python-api/test/unit/test_cache.py | 416 ++ .../test/unit/test_collection.py | 574 +++ .../test/unit/test_custom_attribute.py | 251 + .../ftrack-python-api/test/unit/test_data.py | 129 + .../test/unit/test_formatter.py | 70 + .../test/unit/test_inspection.py | 101 + .../test/unit/test_operation.py | 79 + .../test/unit/test_package.py | 48 + .../test/unit/test_plugin.py | 192 + .../ftrack-python-api/test/unit/test_query.py | 164 + .../test/unit/test_session.py | 1519 ++++++ .../ftrack-python-api/test/unit/test_timer.py | 74 + .../ftrack/scripts/sub_event_processor.py | 4 +- .../ftrack/scripts/sub_event_status.py | 4 +- .../ftrack/scripts/sub_event_storer.py | 6 +- .../ftrack/scripts/sub_legacy_server.py | 2 +- .../ftrack/scripts/sub_user_server.py | 4 +- .../ftrack/tray/__init__.py | 0 .../ftrack/tray/ftrack_tray.py | 0 .../ftrack/tray/login_dialog.py | 2 +- .../ftrack/tray/login_tools.py | 0 openpype/modules/ftrack/python2_vendor/arrow | 1 - .../ftrack/python2_vendor/ftrack-python-api | 1 - 328 files changed, 42186 insertions(+), 69 deletions(-) rename openpype/modules/{ => default_modules}/ftrack/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_clone_review_session.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_multiple_notes.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_prepare_project.py (99%) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_server/action_push_frame_values_to_task.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_sync_to_avalon.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_del_avalon_id_from_new.py (90%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_first_version_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_next_task_update.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_push_frame_values_to_task.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_radio_buttons.py (96%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_sync_to_avalon.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_task_to_parent_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_task_to_version_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_thumbnail_updates.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_user_assigment.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_version_to_task_statuses.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_applications.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_batch_task_creation.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_client_review_sort.py (97%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_component_open.py (96%) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_user/action_create_cust_attrs.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_create_folders.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_create_project_structure.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delete_asset.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delete_old_versions.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delivery.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_djvview.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_job_killer.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_multiple_notes.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_prepare_project.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_rv.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_seed.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_sync_to_avalon.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_test.py (89%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_thumbnail_to_childern.py (96%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_thumbnail_to_parent.py (97%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_where_run_ask.py (94%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_where_run_show.py (98%) rename openpype/modules/{ => 
default_modules}/ftrack/ftrack_module.py (99%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/event_server_cli.py (98%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/ftrack_server.py (100%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/lib.py (99%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/socket_thread.py (100%) rename openpype/modules/{ => default_modules}/ftrack/interfaces.py (100%) rename openpype/modules/{ => default_modules}/ftrack/launch_hooks/post_ftrack_changes.py (100%) rename openpype/modules/{ => default_modules}/ftrack/launch_hooks/pre_python2_vendor.py (96%) rename openpype/modules/{ => default_modules}/ftrack/lib/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/avalon_sync.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/constants.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/credentials.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/custom_attributes.json (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/custom_attributes.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_action_handler.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_base_handler.py (99%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_event_handler.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/settings.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/collect_ftrack_api.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/collect_ftrack_family.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_api.py (100%) rename 
openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_instances.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_note.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_hierarchy_ftrack.py (99%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_remove_components.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/validate_custom_ftrack_attributes.py (100%) create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst create mode 
100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py 
create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/builtins/builtins/__init__.py (100%) create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst 
create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst create mode 
100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py 
create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_processor.py (95%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_status.py (98%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_storer.py (96%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_legacy_server.py (97%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_user_server.py (93%) rename openpype/modules/{ => default_modules}/ftrack/tray/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/tray/ftrack_tray.py (100%) rename openpype/modules/{ => default_modules}/ftrack/tray/login_dialog.py (99%) rename openpype/modules/{ => default_modules}/ftrack/tray/login_tools.py (100%) delete mode 160000 openpype/modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/ftrack/python2_vendor/ftrack-python-api diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/default_modules/ftrack/__init__.py similarity index 100% rename from openpype/modules/ftrack/__init__.py rename to openpype/modules/default_modules/ftrack/__init__.py diff --git 
a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py index 59c8bffb754..1ad7a17785e 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py @@ -1,6 +1,6 @@ import json -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction def clone_review_session(session, entity): diff --git a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py index 9ad7b1a9692..f9aac2c80a1 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class MultipleNotesServer(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py index 3a96ae3311b..85317031b28 100644 --- 
a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py @@ -4,7 +4,7 @@ from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( ServerAction, get_openpype_attr, CUST_ATTR_AUTO_SYNC diff --git a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py index b38e18d089c..3f63ce6face 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import json import collections import ftrack_api -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PushHierValuesToNonHier(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py index 8f78f998ac7..d449c4b7dfd 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import ServerAction -from openpype.modules.ftrack.lib.avalon_sync 
import SyncEntitiesFactory +from openpype_modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonServer(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py similarity index 90% rename from openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py index 078596cc2ee..35b5d809fd0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py @@ -1,6 +1,6 @@ -from openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY -from openpype.modules.ftrack.event_handlers_server.event_sync_to_avalon import ( +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.event_handlers_server.event_sync_to_avalon import ( SyncToAvalonEvent ) diff --git a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_first_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py index 511f62a2071..ecc6c95d90f 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class 
FirstVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_next_task_update.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py index ad62beb2966..a65ae46545c 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class NextTaskUpdate(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 81719258e1e..10b165e7f6c 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import datetime import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseEvent, query_custom_attributes ) diff --git a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py index 
1ebd7b68d28..99ad3aec374 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py @@ -1,5 +1,5 @@ import ftrack_api -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class RadioButtons(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 1dd056adeec..93a0404c0bf 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -17,7 +17,7 @@ from avalon import schema from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( get_openpype_attr, CUST_ATTR_ID_KEY, CUST_ATTR_AUTO_SYNC, diff --git a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py index 4192a4bed0b..a0e039926e0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskStatusToParent(BaseEvent): diff --git 
a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py index f2d37230216..b77849c6789 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskToVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py index cbeeeee5c5a..64673f792c3 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class ThumbnailEvents(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/event_user_assigment.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py index a0734e14a11..efc1e767755 100644 --- 
a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py @@ -2,8 +2,8 @@ import re import subprocess -from openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from avalon.api import AvalonMongoDB from bson.objectid import ObjectId diff --git a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py index f215bedcc25..e36c3eecd98 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class VersionToTaskStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_applications.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py index 23c96e1b9fd..54de6f1fd64 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_applications.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py @@ -1,7 +1,7 @@ import os from uuid import uuid4 -from openpype.modules.ftrack.lib import BaseAction +from 
openpype_modules.ftrack.lib import BaseAction from openpype.lib import ( ApplicationManager, ApplicationLaunchFailed, diff --git a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py index b9f0e7c5d3a..c7fb1af98b9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py @@ -2,7 +2,7 @@ Taken from https://github.com/tokejepsen/ftrack-hooks/tree/master/batch_tasks """ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class BatchTasksAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py index 45cc9adf553..dc97ed972d8 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py @@ -1,6 +1,6 @@ import collections import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr diff --git a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py index 7c9a2881d6c..5ad5f10e8e8 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon try: from functools import cmp_to_key except Exception: diff --git a/openpype/modules/ftrack/event_handlers_user/action_component_open.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_component_open.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py index b3cdac07226..c731713c106 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_component_open.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py @@ -1,7 +1,7 @@ import os import sys import subprocess -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ComponentOpen(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 63605eda5e7..599d2eb2572 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -2,7 +2,7 @@ import json import arrow import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_folders.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py index 075b8d3d25c..994dbd90e48 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py @@ -1,5 +1,5 @@ import os -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon import lib as avalonlib from openpype.api import ( Anatomy, diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py index 035a1c60de8..121c9f652bc 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -2,7 +2,7 @@ import re import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy, get_project_settings diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_asset.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index c20491349fc..f860065b26f 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -4,7 +4,7 @@ from queue import Queue from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py index dbddc7a95ed..063f086e9c4 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,7 +5,7 @@ import clique from pymongo import UpdateOne -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB from openpype.api import Anatomy diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delivery.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py index 2e7599647a2..1f28b189007 100644 --- 
a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py @@ -6,8 +6,8 @@ from bson.objectid import ObjectId from openpype.api import Anatomy, config -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype.lib.delivery import ( path_from_representation, get_format_dict, diff --git a/openpype/modules/ftrack/event_handlers_user/action_djvview.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_djvview.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py index c05fbed2d09..c603a2d2003 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_djvview.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py @@ -1,7 +1,7 @@ import os import subprocess from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class DJVViewAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_job_killer.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py index 47ed1e78957..af24e0280db 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, 
statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class JobKiller(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py index 8db65fe39ba..4a89c6d7e9e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class MultipleNotes(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py index ea0bfa29715..c266d24fd35 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py @@ -4,7 +4,7 @@ from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr, diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_rv.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py index 
3172b742615..71d790f7e76 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_rv.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py @@ -3,7 +3,7 @@ import traceback import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon import ftrack_api from avalon import io, api diff --git a/openpype/modules/ftrack/event_handlers_user/action_seed.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_seed.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py index 1f01f0af1d9..4021d70c0ac 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_seed.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py @@ -1,6 +1,6 @@ import os from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class SeedDebugProject(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py index 4464e51d3d1..4820925844b 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py @@ -4,11 +4,11 @@ import requests from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import 
Anatomy from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY class StoreThumbnailsToAvalon(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py index 89fac7cf80c..d6ca561bbed 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonLocal(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_test.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py similarity index 89% rename from openpype/modules/ftrack/event_handlers_user/action_test.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py index 206c67de509..bd71ba5bf97 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_test.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class TestAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py index a12f25b57d6..3b909601604 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToChildren(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py index 284723bb0f0..2f0110b7aa2 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToParent(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py similarity index 94% rename from openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index 6950d45ecdf..8e81ae4a1b4 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ActionAskWhereIRun(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_show.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_where_run_show.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py index 4ce1a439a3a..8ac9fc272d1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_show.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py @@ -1,7 +1,7 @@ import platform import socket import getpass -from openpype.modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.lib import BaseAction class ActionShowWhereIRun(BaseAction): diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py similarity index 99% rename from openpype/modules/ftrack/ftrack_module.py rename to openpype/modules/default_modules/ftrack/ftrack_module.py index 6fce308b19b..6fd27372617 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/default_modules/ftrack/ftrack_module.py @@ -231,7 +231,7 @@ def on_project_anatomy_save( return import ftrack_api - from openpype.modules.ftrack.lib import get_openpype_attr + from openpype_modules.ftrack.lib import get_openpype_attr try: session = self.create_ftrack_session() diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/default_modules/ftrack/ftrack_server/__init__.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/__init__.py rename to openpype/modules/default_modules/ftrack/ftrack_server/__init__.py diff --git 
a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py similarity index 98% rename from openpype/modules/ftrack/ftrack_server/event_server_cli.py rename to openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py index 8bba22b4754..1e14929d96e 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py @@ -18,17 +18,17 @@ get_pype_execute_args, OpenPypeMongoConnection ) -from openpype.modules.ftrack import FTRACK_MODULE_DIR -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack.lib import ( credentials, get_ftrack_url_from_settings ) -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.lib import ( check_ftrack_url, get_ftrack_event_mongo_info ) -from openpype.modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.ftrack_server import socket_thread class MongoPermissionsError(Exception): diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/ftrack_server.py rename to openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py similarity index 99% rename from openpype/modules/ftrack/ftrack_server/lib.py rename to openpype/modules/default_modules/ftrack/ftrack_server/lib.py index 88f849e765c..e80d6a3a6b1 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py @@ -22,7 +22,7 @@ from weakref import WeakMethod except ImportError: from ftrack_api._weakref import WeakMethod -from 
openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/socket_thread.py rename to openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py diff --git a/openpype/modules/ftrack/interfaces.py b/openpype/modules/default_modules/ftrack/interfaces.py similarity index 100% rename from openpype/modules/ftrack/interfaces.py rename to openpype/modules/default_modules/ftrack/interfaces.py diff --git a/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py b/openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py similarity index 100% rename from openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py rename to openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py diff --git a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py similarity index 96% rename from openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py rename to openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py index d34b6533fbe..0dd894bebf6 100644 --- a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import FTRACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git a/openpype/modules/ftrack/lib/__init__.py b/openpype/modules/default_modules/ftrack/lib/__init__.py similarity index 100% rename from 
openpype/modules/ftrack/lib/__init__.py rename to openpype/modules/default_modules/ftrack/lib/__init__.py diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/default_modules/ftrack/lib/avalon_sync.py similarity index 100% rename from openpype/modules/ftrack/lib/avalon_sync.py rename to openpype/modules/default_modules/ftrack/lib/avalon_sync.py diff --git a/openpype/modules/ftrack/lib/constants.py b/openpype/modules/default_modules/ftrack/lib/constants.py similarity index 100% rename from openpype/modules/ftrack/lib/constants.py rename to openpype/modules/default_modules/ftrack/lib/constants.py diff --git a/openpype/modules/ftrack/lib/credentials.py b/openpype/modules/default_modules/ftrack/lib/credentials.py similarity index 100% rename from openpype/modules/ftrack/lib/credentials.py rename to openpype/modules/default_modules/ftrack/lib/credentials.py diff --git a/openpype/modules/ftrack/lib/custom_attributes.json b/openpype/modules/default_modules/ftrack/lib/custom_attributes.json similarity index 100% rename from openpype/modules/ftrack/lib/custom_attributes.json rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.json diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/default_modules/ftrack/lib/custom_attributes.py similarity index 100% rename from openpype/modules/ftrack/lib/custom_attributes.py rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.py diff --git a/openpype/modules/ftrack/lib/ftrack_action_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py similarity index 100% rename from openpype/modules/ftrack/lib/ftrack_action_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py similarity index 99% rename from openpype/modules/ftrack/lib/ftrack_base_handler.py rename to 
openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py index 011ce8db9dc..8a29b400294 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py @@ -9,7 +9,7 @@ from openpype.settings import get_project_settings import ftrack_api -from openpype.modules.ftrack import ftrack_server +from openpype_modules.ftrack import ftrack_server class MissingPermision(Exception): diff --git a/openpype/modules/ftrack/lib/ftrack_event_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py similarity index 100% rename from openpype/modules/ftrack/lib/ftrack_event_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py diff --git a/openpype/modules/ftrack/lib/settings.py b/openpype/modules/default_modules/ftrack/lib/settings.py similarity index 100% rename from openpype/modules/ftrack/lib/settings.py rename to openpype/modules/default_modules/ftrack/lib/settings.py diff --git a/openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py b/openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py similarity index 100% rename from openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py rename to openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py similarity index 100% rename from 
openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py similarity index 99% rename from openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py rename to 
openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 118a73a6363..2fd5296d24a 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -9,7 +9,7 @@ CUST_ATTR_GROUP = "openpype" -# Copy of `get_pype_attr` from openpype.modules.ftrack.lib +# Copy of `get_pype_attr` from openpype_modules.ftrack.lib # TODO import from openpype's ftrack module when possible to not break Python 2 def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] diff --git a/openpype/modules/ftrack/plugins/publish/integrate_remove_components.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_remove_components.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py diff --git a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py rename to openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml new file mode 100644 index 00000000000..c3608357a43 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml @@ -0,0 +1 @@ +open_collective: arrow diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 
00000000000..e4e242ee42d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: "🐞 Bug Report" +about: Find a bug? Create a report to help us improve. +title: '' +labels: 'bug' +assignees: '' +--- + + + +## Issue Description + + + +## System Info + +- 🖥 **OS name and version**: +- 🐍 **Python version**: +- 🏹 **Arrow version**: diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md new file mode 100644 index 00000000000..753ed0c620a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md @@ -0,0 +1,17 @@ +--- +name: "📚 Documentation" +about: Find errors or problems in the docs (https://arrow.readthedocs.io)? +title: '' +labels: 'documentation' +assignees: '' +--- + + + +## Issue Description + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000000..fcab9213f59 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: "💡 Feature Request" +about: Have an idea for a new feature or improvement? 
+title: '' +labels: 'enhancement' +assignees: '' +--- + + + +## Feature Request + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md new file mode 100644 index 00000000000..0e07c288af5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md @@ -0,0 +1,22 @@ +## Pull Request Checklist + +Thank you for taking the time to improve Arrow! Before submitting your pull request, please check all *appropriate* boxes: + + +- [ ] 🧪 Added **tests** for changed code. +- [ ] 🛠️ All tests **pass** when run locally (run `tox` or `make test` to find out!). +- [ ] 🧹 All linting checks **pass** when run locally (run `tox -e lint` or `make lint` to find out!). +- [ ] 📚 Updated **documentation** for changed code. +- [ ] ⏩ Code is **up-to-date** with the `master` branch. + +If you have *any* questions about your code changes or any of the points above, please submit your questions along with the pull request and we will try our best to help! 
+ +## Description of Changes + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml new file mode 100644 index 00000000000..d800f399c6f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml @@ -0,0 +1,123 @@ +name: tests + +on: + pull_request: # Run on all pull requests + push: # Run only on pushes to master + branches: + - master + schedule: # Run monthly + - cron: "0 0 1 * *" + +jobs: + test: + name: ${{ matrix.os }} (${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + python-version: ["pypy3", "2.7", "3.5", "3.6", "3.7", "3.8", "3.9-dev"] + os: [ubuntu-latest, macos-latest, windows-latest] + exclude: + # pypy3 randomly fails on Windows builds + - os: windows-latest + python-version: "pypy3" + + steps: + # Check out latest code + - uses: actions/checkout@v2 + + # Configure pip cache + - name: Cache pip (Linux) + uses: actions/cache@v2 + if: startsWith(runner.os, 'Linux') + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Cache pip (macOS) + uses: actions/cache@v2 + if: startsWith(runner.os, 'macOS') + with: + path: ~/Library/Caches/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Cache pip (Windows) + uses: actions/cache@v2 + if: startsWith(runner.os, 'Windows') + with: + path: ~\AppData\Local\pip\Cache + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + # Set up Python + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + # Install dependencies + 
- name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install -U tox tox-gh-actions + + # Run tests + - name: Test with tox + run: tox + + # Upload coverage report + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + file: coverage.xml + + lint: + runs-on: ubuntu-latest + + steps: + # Check out latest code + - uses: actions/checkout@v2 + + # Set up Python + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: "3.8" + + # Configure pip cache + - name: Cache pip + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + # Configure pre-commit cache + - name: Cache pre-commit + uses: actions/cache@v2 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} + restore-keys: | + ${{ runner.os }}-pre-commit- + + # Install dependencies + - name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install -U tox + + # Lint code + - name: Lint code + run: tox -e lint + + # Lint docs + - name: Lint docs + run: tox -e docs diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore new file mode 100644 index 00000000000..0448d0cf0c6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore @@ -0,0 +1,211 @@ +README.rst.new + +# Small entry point file for debugging tasks +test.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a 
template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +local/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Swap +[._]*.s[a-v][a-z] +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +.idea/ +.vscode/ + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + 
+# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml new file mode 100644 index 00000000000..1f5128595ba --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml @@ -0,0 +1,41 @@ +default_language_version: + python: python3 +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: fix-encoding-pragma + exclude: ^arrow/_version.py + - id: requirements-txt-fixer + - id: check-ast + - id: check-yaml + - id: check-case-conflict + - id: check-docstring-first + - id: check-merge-conflict + - id: debug-statements + - repo: https://github.com/timothycrosley/isort + rev: 5.4.2 + hooks: + - id: isort + - repo: https://github.com/asottile/pyupgrade + rev: v2.7.2 + hooks: + - id: pyupgrade + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.6.0 + hooks: + - id: python-no-eval + - id: python-check-blanket-noqa + - id: rst-backticks + - repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + args: [--safe, --quiet] + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.3 + hooks: + - id: flake8 + additional_dependencies: [flake8-bugbear] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst new file mode 100644 index 00000000000..0b55a4522c7 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst @@ -0,0 +1,598 @@ +Changelog +========= + +0.17.0 (2020-10-2) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. This is the last major release to support Python 2.7 and Python 3.5. +- [NEW] Arrow now properly handles imaginary datetimes during DST shifts. For example: + +..code-block:: python + >>> just_before = arrow.get(2013, 3, 31, 1, 55, tzinfo="Europe/Paris") + >>> just_before.shift(minutes=+10) + + +..code-block:: python + >>> before = arrow.get("2018-03-10 23:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") + >>> after = arrow.get("2018-03-11 04:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") + >>> result=[(t, t.to("utc")) for t in arrow.Arrow.range("hour", before, after)] + >>> for r in result: + ... print(r) + ... + (, ) + (, ) + (, ) + (, ) + (, ) + +- [NEW] Added ``humanize`` week granularity translation for Tagalog. +- [CHANGE] Calls to the ``timestamp`` property now emit a ``DeprecationWarning``. In a future release, ``timestamp`` will be changed to a method to align with Python's datetime module. If you would like to continue using the property, please change your code to use the ``int_timestamp`` or ``float_timestamp`` properties instead. +- [CHANGE] Expanded and improved Catalan locale. +- [FIX] Fixed a bug that caused ``Arrow.range()`` to incorrectly cut off ranges in certain scenarios when using month, quarter, or year endings. +- [FIX] Fixed a bug that caused day of week token parsing to be case sensitive. +- [INTERNAL] A number of functions were reordered in arrow.py for better organization and grouping of related methods. This change will have no impact on usage. +- [INTERNAL] A minimum tox version is now enforced for compatibility reasons. Contributors must use tox >3.18.0 going forward. 
+ +0.16.0 (2020-08-23) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.16.x and 0.17.x releases are the last to support Python 2.7 and 3.5. +- [NEW] Implemented `PEP 495 `_ to handle ambiguous datetimes. This is achieved by the addition of the ``fold`` attribute for Arrow objects. For example: + +.. code-block:: python + + >>> before = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm') + + >>> before.fold + 0 + >>> before.ambiguous + True + >>> after = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm', fold=1) + + >>> after = before.replace(fold=1) + + +- [NEW] Added ``normalize_whitespace`` flag to ``arrow.get``. This is useful for parsing log files and/or any files that may contain inconsistent spacing. For example: + +.. code-block:: python + + >>> arrow.get("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True) + + >>> arrow.get("2013-036 \t 04:05:06Z", normalize_whitespace=True) + + +0.15.8 (2020-07-23) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.15.x, 0.16.x, and 0.17.x releases are the last to support Python 2.7 and 3.5. +- [NEW] Added ``humanize`` week granularity translation for Czech. +- [FIX] ``arrow.get`` will now pick sane defaults when weekdays are passed with particular token combinations, see `#446 `_. +- [INTERNAL] Moved arrow to an organization. The repo can now be found `here `_. +- [INTERNAL] Started issuing deprecation warnings for Python 2.7 and 3.5. +- [INTERNAL] Added Python 3.9 to CI pipeline. + +0.15.7 (2020-06-19) +------------------- + +- [NEW] Added a number of built-in format strings. See the `docs `_ for a complete list of supported formats. For example: + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw.format(arrow.FORMAT_COOKIE) + 'Wednesday, 27-May-2020 10:30:35 UTC' + +- [NEW] Arrow is now fully compatible with Python 3.9 and PyPy3. 
+- [NEW] Added Makefile, tox.ini, and requirements.txt files to the distribution bundle. +- [NEW] Added French Canadian and Swahili locales. +- [NEW] Added ``humanize`` week granularity translation for Hebrew, Greek, Macedonian, Swedish, Slovak. +- [FIX] ms and μs timestamps are now normalized in ``arrow.get()``, ``arrow.fromtimestamp()``, and ``arrow.utcfromtimestamp()``. For example: + +.. code-block:: python + + >>> ts = 1591161115194556 + >>> arw = arrow.get(ts) + + >>> arw.timestamp + 1591161115 + +- [FIX] Refactored and updated Macedonian, Hebrew, Korean, and Portuguese locales. + +0.15.6 (2020-04-29) +------------------- + +- [NEW] Added support for parsing and formatting `ISO 8601 week dates `_ via a new token ``W``, for example: + +.. code-block:: python + + >>> arrow.get("2013-W29-6", "W") + + >>> utc=arrow.utcnow() + >>> utc + + >>> utc.format("W") + '2020-W04-4' + +- [NEW] Formatting with ``x`` token (microseconds) is now possible, for example: + +.. code-block:: python + + >>> dt = arrow.utcnow() + >>> dt.format("x") + '1585669870688329' + >>> dt.format("X") + '1585669870' + +- [NEW] Added ``humanize`` week granularity translation for German, Italian, Polish & Taiwanese locales. +- [FIX] Consolidated and simplified German locales. +- [INTERNAL] Moved testing suite from nosetest/Chai to pytest/pytest-mock. +- [INTERNAL] Converted xunit-style setup and teardown functions in tests to pytest fixtures. +- [INTERNAL] Setup Github Actions for CI alongside Travis. +- [INTERNAL] Help support Arrow's future development by donating to the project on `Open Collective `_. + +0.15.5 (2020-01-03) +------------------- + +- [WARN] Python 2 reached EOL on 2020-01-01. arrow will **drop support** for Python 2 in a future release to be decided (see `#739 `_). +- [NEW] Added bounds parameter to ``span_range``, ``interval`` and ``span`` methods. This allows you to include or exclude the start and end values. 
+- [NEW] ``arrow.get()`` can now create arrow objects from a timestamp with a timezone, for example: + +.. code-block:: python + + >>> arrow.get(1367900664, tzinfo=tz.gettz('US/Pacific')) + + +- [NEW] ``humanize`` can now combine multiple levels of granularity, for example: + +.. code-block:: python + + >>> later140 = arrow.utcnow().shift(seconds=+8400) + >>> later140.humanize(granularity="minute") + 'in 139 minutes' + >>> later140.humanize(granularity=["hour", "minute"]) + 'in 2 hours and 19 minutes' + +- [NEW] Added Hong Kong locale (``zh_hk``). +- [NEW] Added ``humanize`` week granularity translation for Dutch. +- [NEW] Numbers are now displayed when using the seconds granularity in ``humanize``. +- [CHANGE] ``range`` now supports both the singular and plural forms of the ``frames`` argument (e.g. day and days). +- [FIX] Improved parsing of strings that contain punctuation. +- [FIX] Improved behaviour of ``humanize`` when singular seconds are involved. + +0.15.4 (2019-11-02) +------------------- + +- [FIX] Fixed an issue that caused package installs to fail on Conda Forge. + +0.15.3 (2019-11-02) +------------------- + +- [NEW] ``factory.get()`` can now create arrow objects from a ISO calendar tuple, for example: + +.. code-block:: python + + >>> arrow.get((2013, 18, 7)) + + +- [NEW] Added a new token ``x`` to allow parsing of integer timestamps with milliseconds and microseconds. +- [NEW] Formatting now supports escaping of characters using the same syntax as parsing, for example: + +.. code-block:: python + + >>> arw = arrow.now() + >>> fmt = "YYYY-MM-DD h [h] m" + >>> arw.format(fmt) + '2019-11-02 3 h 32' + +- [NEW] Added ``humanize`` week granularity translations for Chinese, Spanish and Vietnamese. +- [CHANGE] Added ``ParserError`` to module exports. +- [FIX] Added support for midnight at end of day. See `#703 `_ for details. +- [INTERNAL] Created Travis build for macOS. +- [INTERNAL] Test parsing and formatting against full timezone database. 
+ +0.15.2 (2019-09-14) +------------------- + +- [NEW] Added ``humanize`` week granularity translations for Portuguese and Brazilian Portuguese. +- [NEW] Embedded changelog within docs and added release dates to versions. +- [FIX] Fixed a bug that caused test failures on Windows only, see `#668 `_ for details. + +0.15.1 (2019-09-10) +------------------- + +- [NEW] Added ``humanize`` week granularity translations for Japanese. +- [FIX] Fixed a bug that caused Arrow to fail when passed a negative timestamp string. +- [FIX] Fixed a bug that caused Arrow to fail when passed a datetime object with ``tzinfo`` of type ``StaticTzInfo``. + +0.15.0 (2019-09-08) +------------------- + +- [NEW] Added support for DDD and DDDD ordinal date tokens. The following functionality is now possible: ``arrow.get("1998-045")``, ``arrow.get("1998-45", "YYYY-DDD")``, ``arrow.get("1998-045", "YYYY-DDDD")``. +- [NEW] ISO 8601 basic format for dates and times is now supported (e.g. ``YYYYMMDDTHHmmssZ``). +- [NEW] Added ``humanize`` week granularity translations for French, Russian and Swiss German locales. +- [CHANGE] Timestamps of type ``str`` are no longer supported **without a format string** in the ``arrow.get()`` method. This change was made to support the ISO 8601 basic format and to address bugs such as `#447 `_. + +The following will NOT work in v0.15.0: + +.. code-block:: python + + >>> arrow.get("1565358758") + >>> arrow.get("1565358758.123413") + +The following will work in v0.15.0: + +.. code-block:: python + + >>> arrow.get("1565358758", "X") + >>> arrow.get("1565358758.123413", "X") + >>> arrow.get(1565358758) + >>> arrow.get(1565358758.123413) + +- [CHANGE] When a meridian token (a|A) is passed and no meridians are available for the specified locale (e.g. unsupported or untranslated) a ``ParserError`` is raised. +- [CHANGE] The timestamp token (``X``) will now match float timestamps of type ``str``: ``arrow.get(“1565358758.123415”, “X”)``. 
+- [CHANGE] Strings with leading and/or trailing whitespace will no longer be parsed without a format string. Please see `the docs `_ for ways to handle this. +- [FIX] The timestamp token (``X``) will now only match on strings that **strictly contain integers and floats**, preventing incorrect matches. +- [FIX] Most instances of ``arrow.get()`` returning an incorrect ``Arrow`` object from a partial parsing match have been eliminated. The following issue have been addressed: `#91 `_, `#196 `_, `#396 `_, `#434 `_, `#447 `_, `#456 `_, `#519 `_, `#538 `_, `#560 `_. + +0.14.7 (2019-09-04) +------------------- + +- [CHANGE] ``ArrowParseWarning`` will no longer be printed on every call to ``arrow.get()`` with a datetime string. The purpose of the warning was to start a conversation about the upcoming 0.15.0 changes and we appreciate all the feedback that the community has given us! + +0.14.6 (2019-08-28) +------------------- + +- [NEW] Added support for ``week`` granularity in ``Arrow.humanize()``. For example, ``arrow.utcnow().shift(weeks=-1).humanize(granularity="week")`` outputs "a week ago". This change introduced two new untranslated words, ``week`` and ``weeks``, to all locale dictionaries, so locale contributions are welcome! +- [NEW] Fully translated the Brazilian Portugese locale. +- [CHANGE] Updated the Macedonian locale to inherit from a Slavic base. +- [FIX] Fixed a bug that caused ``arrow.get()`` to ignore tzinfo arguments of type string (e.g. ``arrow.get(tzinfo="Europe/Paris")``). +- [FIX] Fixed a bug that occurred when ``arrow.Arrow()`` was instantiated with a ``pytz`` tzinfo object. +- [FIX] Fixed a bug that caused Arrow to fail when passed a sub-second token, that when rounded, had a value greater than 999999 (e.g. ``arrow.get("2015-01-12T01:13:15.9999995")``). Arrow should now accurately propagate the rounding for large sub-second tokens. + +0.14.5 (2019-08-09) +------------------- + +- [NEW] Added Afrikaans locale. 
+- [CHANGE] Removed deprecated ``replace`` shift functionality. Users looking to pass plural properties to the ``replace`` function to shift values should use ``shift`` instead. +- [FIX] Fixed bug that occurred when ``factory.get()`` was passed a locale kwarg. + +0.14.4 (2019-07-30) +------------------- + +- [FIX] Fixed a regression in 0.14.3 that prevented a tzinfo argument of type string to be passed to the ``get()`` function. Functionality such as ``arrow.get("2019072807", "YYYYMMDDHH", tzinfo="UTC")`` should work as normal again. +- [CHANGE] Moved ``backports.functools_lru_cache`` dependency from ``extra_requires`` to ``install_requires`` for ``Python 2.7`` installs to fix `#495 `_. + +0.14.3 (2019-07-28) +------------------- + +- [NEW] Added full support for Python 3.8. +- [CHANGE] Added warnings for upcoming factory.get() parsing changes in 0.15.0. Please see `#612 `_ for full details. +- [FIX] Extensive refactor and update of documentation. +- [FIX] factory.get() can now construct from kwargs. +- [FIX] Added meridians to Spanish Locale. + +0.14.2 (2019-06-06) +------------------- + +- [CHANGE] Travis CI builds now use tox to lint and run tests. +- [FIX] Fixed UnicodeDecodeError on certain locales (#600). + +0.14.1 (2019-06-06) +------------------- + +- [FIX] Fixed ``ImportError: No module named 'dateutil'`` (#598). + +0.14.0 (2019-06-06) +------------------- + +- [NEW] Added provisional support for Python 3.8. +- [CHANGE] Removed support for EOL Python 3.4. +- [FIX] Updated setup.py with modern Python standards. +- [FIX] Upgraded dependencies to latest versions. +- [FIX] Enabled flake8 and black on travis builds. +- [FIX] Formatted code using black and isort. + +0.13.2 (2019-05-30) +------------------- + +- [NEW] Add is_between method. +- [FIX] Improved humanize behaviour for near zero durations (#416). +- [FIX] Correct humanize behaviour with future days (#541). +- [FIX] Documentation updates. +- [FIX] Improvements to German Locale. 
+ +0.13.1 (2019-02-17) +------------------- + +- [NEW] Add support for Python 3.7. +- [CHANGE] Remove deprecation decorators for Arrow.range(), Arrow.span_range() and Arrow.interval(), all now return generators, wrap with list() to get old behavior. +- [FIX] Documentation and docstring updates. + +0.13.0 (2019-01-09) +------------------- + +- [NEW] Added support for Python 3.6. +- [CHANGE] Drop support for Python 2.6/3.3. +- [CHANGE] Return generator instead of list for Arrow.range(), Arrow.span_range() and Arrow.interval(). +- [FIX] Make arrow.get() work with str & tzinfo combo. +- [FIX] Make sure special RegEx characters are escaped in format string. +- [NEW] Added support for ZZZ when formatting. +- [FIX] Stop using datetime.utcnow() in internals, use datetime.now(UTC) instead. +- [FIX] Return NotImplemented instead of TypeError in arrow math internals. +- [NEW] Added Estonian Locale. +- [FIX] Small fixes to Greek locale. +- [FIX] TagalogLocale improvements. +- [FIX] Added test requirements to setup. +- [FIX] Improve docs for get, now and utcnow methods. +- [FIX] Correct typo in depreciation warning. + +0.12.1 +------ + +- [FIX] Allow universal wheels to be generated and reliably installed. +- [FIX] Make humanize respect only_distance when granularity argument is also given. 
+ +0.12.0 +------ + +- [FIX] Compatibility fix for Python 2.x + +0.11.0 +------ + +- [FIX] Fix grammar of ArabicLocale +- [NEW] Add Nepali Locale +- [FIX] Fix month name + rename AustriaLocale -> AustrianLocale +- [FIX] Fix typo in Basque Locale +- [FIX] Fix grammar in PortugueseBrazilian locale +- [FIX] Remove pip --user-mirrors flag +- [NEW] Add Indonesian Locale + +0.10.0 +------ + +- [FIX] Fix getattr off by one for quarter +- [FIX] Fix negative offset for UTC +- [FIX] Update arrow.py + +0.9.0 +----- + +- [NEW] Remove duplicate code +- [NEW] Support gnu date iso 8601 +- [NEW] Add support for universal wheels +- [NEW] Slovenian locale +- [NEW] Slovak locale +- [NEW] Romanian locale +- [FIX] respect limit even if end is defined range +- [FIX] Separate replace & shift functions +- [NEW] Added tox +- [FIX] Fix supported Python versions in documentation +- [NEW] Azerbaijani locale added, locale issue fixed in Turkish. +- [FIX] Format ParserError's raise message + +0.8.0 +----- + +- [] + +0.7.1 +----- + +- [NEW] Esperanto locale (batisteo) + +0.7.0 +----- + +- [FIX] Parse localized strings #228 (swistakm) +- [FIX] Modify tzinfo parameter in ``get`` api #221 (bottleimp) +- [FIX] Fix Czech locale (PrehistoricTeam) +- [FIX] Raise TypeError when adding/subtracting non-dates (itsmeolivia) +- [FIX] Fix pytz conversion error (Kudo) +- [FIX] Fix overzealous time truncation in span_range (kdeldycke) +- [NEW] Humanize for time duration #232 (ybrs) +- [NEW] Add Thai locale (sipp11) +- [NEW] Adding Belarusian (be) locale (oire) +- [NEW] Search date in strings (beenje) +- [NEW] Note that arrow's tokens differ from strptime's. (offby1) + +0.6.0 +----- + +- [FIX] Added support for Python 3 +- [FIX] Avoid truncating oversized epoch timestamps. Fixes #216. 
+- [FIX] Fixed month abbreviations for Ukrainian +- [FIX] Fix typo timezone +- [FIX] A couple of dialect fixes and two new languages +- [FIX] Spanish locale: ``Miercoles`` should have acute accent +- [Fix] Fix Finnish grammar +- [FIX] Fix typo in 'Arrow.floor' docstring +- [FIX] Use read() utility to open README +- [FIX] span_range for week frame +- [NEW] Add minimal support for fractional seconds longer than six digits. +- [NEW] Adding locale support for Marathi (mr) +- [NEW] Add count argument to span method +- [NEW] Improved docs + +0.5.1 - 0.5.4 +------------- + +- [FIX] test the behavior of simplejson instead of calling for_json directly (tonyseek) +- [FIX] Add Hebrew Locale (doodyparizada) +- [FIX] Update documentation location (andrewelkins) +- [FIX] Update setup.py Development Status level (andrewelkins) +- [FIX] Case insensitive month match (cshowe) + +0.5.0 +----- + +- [NEW] struct_time addition. (mhworth) +- [NEW] Version grep (eirnym) +- [NEW] Default to ISO 8601 format (emonty) +- [NEW] Raise TypeError on comparison (sniekamp) +- [NEW] Adding Macedonian(mk) locale (krisfremen) +- [FIX] Fix for ISO seconds and fractional seconds (sdispater) (andrewelkins) +- [FIX] Use correct Dutch wording for "hours" (wbolster) +- [FIX] Complete the list of english locales (indorilftw) +- [FIX] Change README to reStructuredText (nyuszika7h) +- [FIX] Parse lower-cased 'h' (tamentis) +- [FIX] Slight modifications to Dutch locale (nvie) + +0.4.4 +----- + +- [NEW] Include the docs in the released tarball +- [NEW] Czech localization Czech localization for Arrow +- [NEW] Add fa_ir to locales +- [FIX] Fixes parsing of time strings with a final Z +- [FIX] Fixes ISO parsing and formatting for fractional seconds +- [FIX] test_fromtimestamp sp +- [FIX] some typos fixed +- [FIX] removed an unused import statement +- [FIX] docs table fix +- [FIX] Issue with specify 'X' template and no template at all to arrow.get +- [FIX] Fix "import" typo in docs/index.rst +- [FIX] Fix unit tests 
for zero passed +- [FIX] Update layout.html +- [FIX] In Norwegian and new Norwegian months and weekdays should not be capitalized +- [FIX] Fixed discrepancy between specifying 'X' to arrow.get and specifying no template + +0.4.3 +----- + +- [NEW] Turkish locale (Emre) +- [NEW] Arabic locale (Mosab Ahmad) +- [NEW] Danish locale (Holmars) +- [NEW] Icelandic locale (Holmars) +- [NEW] Hindi locale (Atmb4u) +- [NEW] Malayalam locale (Atmb4u) +- [NEW] Finnish locale (Stormpat) +- [NEW] Portuguese locale (Danielcorreia) +- [NEW] ``h`` and ``hh`` strings are now supported (Averyonghub) +- [FIX] An incorrect inflection in the Polish locale has been fixed (Avalanchy) +- [FIX] ``arrow.get`` now properly handles ``Date`` (Jaapz) +- [FIX] Tests are now declared in ``setup.py`` and the manifest (Pypingou) +- [FIX] ``__version__`` has been added to ``__init__.py`` (Sametmax) +- [FIX] ISO 8601 strings can be parsed without a separator (Ivandiguisto / Root) +- [FIX] Documentation is now more clear regarding some inputs on ``arrow.get`` (Eriktaubeneck) +- [FIX] Some documentation links have been fixed (Vrutsky) +- [FIX] Error messages for parse errors are now more descriptive (Maciej Albin) +- [FIX] The parser now correctly checks for separators in strings (Mschwager) + +0.4.2 +----- + +- [NEW] Factory ``get`` method now accepts a single ``Arrow`` argument. +- [NEW] Tokens SSSS, SSSSS and SSSSSS are supported in parsing. +- [NEW] ``Arrow`` objects have a ``float_timestamp`` property. 
+- [NEW] Vietnamese locale (Iu1nguoi) +- [NEW] Factory ``get`` method now accepts a list of format strings (Dgilland) +- [NEW] A MANIFEST.in file has been added (Pypingou) +- [NEW] Tests can be run directly from ``setup.py`` (Pypingou) +- [FIX] Arrow docs now list 'day of week' format tokens correctly (Rudolphfroger) +- [FIX] Several issues with the Korean locale have been resolved (Yoloseem) +- [FIX] ``humanize`` now correctly returns unicode (Shvechikov) +- [FIX] ``Arrow`` objects now pickle / unpickle correctly (Yoloseem) + +0.4.1 +----- + +- [NEW] Table / explanation of formatting & parsing tokens in docs +- [NEW] Brazilian locale (Augusto2112) +- [NEW] Dutch locale (OrangeTux) +- [NEW] Italian locale (Pertux) +- [NEW] Austrain locale (LeChewbacca) +- [NEW] Tagalog locale (Marksteve) +- [FIX] Corrected spelling and day numbers in German locale (LeChewbacca) +- [FIX] Factory ``get`` method should now handle unicode strings correctly (Bwells) +- [FIX] Midnight and noon should now parse and format correctly (Bwells) + +0.4.0 +----- + +- [NEW] Format-free ISO 8601 parsing in factory ``get`` method +- [NEW] Support for 'week' / 'weeks' in ``span``, ``range``, ``span_range``, ``floor`` and ``ceil`` +- [NEW] Support for 'weeks' in ``replace`` +- [NEW] Norwegian locale (Martinp) +- [NEW] Japanese locale (CortYuming) +- [FIX] Timezones no longer show the wrong sign when formatted (Bean) +- [FIX] Microseconds are parsed correctly from strings (Bsidhom) +- [FIX] Locale day-of-week is no longer off by one (Cynddl) +- [FIX] Corrected plurals of Ukrainian and Russian nouns (Catchagain) +- [CHANGE] Old 0.1 ``arrow`` module method removed +- [CHANGE] Dropped timestamp support in ``range`` and ``span_range`` (never worked correctly) +- [CHANGE] Dropped parsing of single string as tz string in factory ``get`` method (replaced by ISO 8601) + +0.3.5 +----- + +- [NEW] French locale (Cynddl) +- [NEW] Spanish locale (Slapresta) +- [FIX] Ranges handle multiple timezones correctly 
(Ftobia) + +0.3.4 +----- + +- [FIX] Humanize no longer sometimes returns the wrong month delta +- [FIX] ``__format__`` works correctly with no format string + +0.3.3 +----- + +- [NEW] Python 2.6 support +- [NEW] Initial support for locale-based parsing and formatting +- [NEW] ArrowFactory class, now proxied as the module API +- [NEW] ``factory`` api method to obtain a factory for a custom type +- [FIX] Python 3 support and tests completely ironed out + +0.3.2 +----- + +- [NEW] Python 3+ support + +0.3.1 +----- + +- [FIX] The old ``arrow`` module function handles timestamps correctly as it used to + +0.3.0 +----- + +- [NEW] ``Arrow.replace`` method +- [NEW] Accept timestamps, datetimes and Arrows for datetime inputs, where reasonable +- [FIX] ``range`` and ``span_range`` respect end and limit parameters correctly +- [CHANGE] Arrow objects are no longer mutable +- [CHANGE] Plural attribute name semantics altered: single -> absolute, plural -> relative +- [CHANGE] Plural names no longer supported as properties (e.g. ``arrow.utcnow().years``) + +0.2.1 +----- + +- [NEW] Support for localized humanization +- [NEW] English, Russian, Greek, Korean, Chinese locales + +0.2.0 +----- + +- **REWRITE** +- [NEW] Date parsing +- [NEW] Date formatting +- [NEW] ``floor``, ``ceil`` and ``span`` methods +- [NEW] ``datetime`` interface implementation +- [NEW] ``clone`` method +- [NEW] ``get``, ``now`` and ``utcnow`` API methods + +0.1.6 +----- + +- [NEW] Humanized time deltas +- [NEW] ``__eq__`` implemented +- [FIX] Issues with conversions related to daylight savings time resolved +- [CHANGE] ``__str__`` uses ISO formatting + +0.1.5 +----- + +- **Started tracking changes** +- [NEW] Parsing of ISO-formatted time zone offsets (e.g. 
'+02:30', '-05:00') +- [NEW] Resolved some issues with timestamps and delta / Olson time zones diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE new file mode 100644 index 00000000000..2bef500de74 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Chris Smith + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in new file mode 100644 index 00000000000..d9955ed96ae --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in @@ -0,0 +1,3 @@ +include LICENSE CHANGELOG.rst README.rst Makefile requirements.txt tox.ini +recursive-include tests *.py +recursive-include docs *.py *.rst *.bat Makefile diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile new file mode 100644 index 00000000000..f294985dc61 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile @@ -0,0 +1,44 @@ +.PHONY: auto test docs clean + +auto: build38 + +build27: PYTHON_VER = python2.7 +build35: PYTHON_VER = python3.5 +build36: PYTHON_VER = python3.6 +build37: PYTHON_VER = python3.7 +build38: PYTHON_VER = python3.8 +build39: PYTHON_VER = python3.9 + +build27 build35 build36 build37 build38 build39: clean + virtualenv venv --python=$(PYTHON_VER) + . venv/bin/activate; \ + pip install -r requirements.txt; \ + pre-commit install + +test: + rm -f .coverage coverage.xml + . venv/bin/activate; pytest + +lint: + . venv/bin/activate; pre-commit run --all-files --show-diff-on-failure + +docs: + rm -rf docs/_build + . 
venv/bin/activate; cd docs; make html + +clean: clean-dist + rm -rf venv .pytest_cache ./**/__pycache__ + rm -f .coverage coverage.xml ./**/*.pyc + +clean-dist: + rm -rf dist build .egg .eggs arrow.egg-info + +build-dist: + . venv/bin/activate; \ + pip install -U setuptools twine wheel; \ + python setup.py sdist bdist_wheel + +upload-dist: + . venv/bin/activate; twine upload dist/* + +publish: test clean-dist build-dist upload-dist clean-dist diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst new file mode 100644 index 00000000000..69f6c50d813 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst @@ -0,0 +1,133 @@ +Arrow: Better dates & times for Python +====================================== + +.. start-inclusion-marker-do-not-remove + +.. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master + :alt: Build Status + :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster + +.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg + :alt: Coverage + :target: https://codecov.io/gh/arrow-py/arrow + +.. image:: https://img.shields.io/pypi/v/arrow.svg + :alt: PyPI Version + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/pyversions/arrow.svg + :alt: Supported Python Versions + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/l/arrow.svg + :alt: License + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :alt: Code Style: Black + :target: https://github.com/psf/black + + +**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. 
It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. + +Arrow is named after the `arrow of time `_ and is heavily inspired by `moment.js `_ and `requests `_. + +Why use Arrow over built-in modules? +------------------------------------ + +Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective: + +- Too many modules: datetime, time, calendar, dateutil, pytz and more +- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. +- Timezones and timestamp conversions are verbose and unpleasant +- Timezone naivety is the norm +- Gaps in functionality: ISO 8601 parsing, timespans, humanization + +Features +-------- + +- Fully-implemented, drop-in replacement for datetime +- Supports Python 2.7, 3.5, 3.6, 3.7, 3.8 and 3.9 +- Timezone-aware and UTC by default +- Provides super-simple creation options for many common input scenarios +- :code:`shift` method with support for relative offsets, including weeks +- Formats and parses strings automatically +- Wide support for ISO 8601 +- Timezone conversion +- Timestamp available as a property +- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year +- Humanizes and supports a growing list of contributed locales +- Extensible for your own Arrow-derived types + +Quick Start +----------- + +Installation +~~~~~~~~~~~~ + +To install Arrow, use `pip `_ or `pipenv `_: + +.. code-block:: console + + $ pip install -U arrow + +Example Usage +~~~~~~~~~~~~~ + +.. 
code-block:: python + + >>> import arrow + >>> arrow.get('2013-05-11T21:23:58.970460+07:00') + + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc = utc.shift(hours=-1) + >>> utc + + + >>> local = utc.to('US/Pacific') + >>> local + + + >>> local.timestamp + 1368303838 + + >>> local.format() + '2013-05-11 13:23:58 -07:00' + + >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-11 13:23:58 -07:00' + + >>> local.humanize() + 'an hour ago' + + >>> local.humanize(locale='ko_kr') + '1시간 전' + +.. end-inclusion-marker-do-not-remove + +Documentation +------------- + +For full documentation, please visit `arrow.readthedocs.io `_. + +Contributing +------------ + +Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing: + +#. Find an issue or feature to tackle on the `issue tracker `_. Issues marked with the `"good first issue" label `_ may be a great place to start! +#. Fork `this repository `_ on GitHub and begin making changes in a branch. +#. Add a few tests to ensure that the bug was fixed or the feature works as expected. +#. Run the entire test suite and linting checks by running one of the following commands: :code:`tox` (if you have `tox `_ installed) **OR** :code:`make build38 && make test && make lint` (if you do not have Python 3.8 installed, replace :code:`build38` with the latest Python version on your system). +#. Submit a pull request and await feedback 😃. + +If you have any questions along the way, feel free to ask them `here `_. + +Support Arrow +------------- + +`Open Collective `_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective `_. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py new file mode 100644 index 00000000000..2883527be89 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from ._version import __version__ +from .api import get, now, utcnow +from .arrow import Arrow +from .factory import ArrowFactory +from .formatter import ( + FORMAT_ATOM, + FORMAT_COOKIE, + FORMAT_RFC822, + FORMAT_RFC850, + FORMAT_RFC1036, + FORMAT_RFC1123, + FORMAT_RFC2822, + FORMAT_RFC3339, + FORMAT_RSS, + FORMAT_W3C, +) +from .parser import ParserError diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py new file mode 100644 index 00000000000..fd86b3ee915 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py @@ -0,0 +1 @@ +__version__ = "0.17.0" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py new file mode 100644 index 00000000000..a6b7be3de28 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Provides the default implementation of :class:`ArrowFactory ` +methods for use as a module API. + +""" + +from __future__ import absolute_import + +from arrow.factory import ArrowFactory + +# internal default factory. 
+_factory = ArrowFactory() + + +def get(*args, **kwargs): + """Calls the default :class:`ArrowFactory ` ``get`` method.""" + + return _factory.get(*args, **kwargs) + + +get.__doc__ = _factory.get.__doc__ + + +def utcnow(): + """Calls the default :class:`ArrowFactory ` ``utcnow`` method.""" + + return _factory.utcnow() + + +utcnow.__doc__ = _factory.utcnow.__doc__ + + +def now(tz=None): + """Calls the default :class:`ArrowFactory ` ``now`` method.""" + + return _factory.now(tz) + + +now.__doc__ = _factory.now.__doc__ + + +def factory(type): + """Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` + or derived type. + + :param type: the type, :class:`Arrow ` or derived. + + """ + + return ArrowFactory(type) + + +__all__ = ["get", "utcnow", "now", "factory"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py new file mode 100644 index 00000000000..4fe95417891 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py @@ -0,0 +1,1584 @@ +# -*- coding: utf-8 -*- +""" +Provides the :class:`Arrow ` class, an enhanced ``datetime`` +replacement. + +""" + +from __future__ import absolute_import + +import calendar +import sys +import warnings +from datetime import datetime, timedelta +from datetime import tzinfo as dt_tzinfo +from math import trunc + +from dateutil import tz as dateutil_tz +from dateutil.relativedelta import relativedelta + +from arrow import formatter, locales, parser, util + +if sys.version_info[:2] < (3, 6): # pragma: no cover + with warnings.catch_warnings(): + warnings.simplefilter("default", DeprecationWarning) + warnings.warn( + "Arrow will drop support for Python 2.7 and 3.5 in the upcoming v1.0.0 release. Please upgrade to " + "Python 3.6+ to continue receiving updates for Arrow.", + DeprecationWarning, + ) + + +class Arrow(object): + """An :class:`Arrow ` object. 
+ + Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing + additional functionality. + + :param year: the calendar year. + :param month: the calendar month. + :param day: the calendar day. + :param hour: (optional) the hour. Defaults to 0. + :param minute: (optional) the minute, Defaults to 0. + :param second: (optional) the second, Defaults to 0. + :param microsecond: (optional) the microsecond. Defaults to 0. + :param tzinfo: (optional) A timezone expression. Defaults to UTC. + :param fold: (optional) 0 or 1, used to disambiguate repeated times. Defaults to 0. + + .. _tz-expr: + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO 8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage:: + + >>> import arrow + >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) + + + """ + + resolution = datetime.resolution + + _ATTRS = ["year", "month", "day", "hour", "minute", "second", "microsecond"] + _ATTRS_PLURAL = ["{}s".format(a) for a in _ATTRS] + _MONTHS_PER_QUARTER = 3 + _SECS_PER_MINUTE = float(60) + _SECS_PER_HOUR = float(60 * 60) + _SECS_PER_DAY = float(60 * 60 * 24) + _SECS_PER_WEEK = float(60 * 60 * 24 * 7) + _SECS_PER_MONTH = float(60 * 60 * 24 * 30.5) + _SECS_PER_YEAR = float(60 * 60 * 24 * 365.25) + + def __init__( + self, + year, + month, + day, + hour=0, + minute=0, + second=0, + microsecond=0, + tzinfo=None, + **kwargs + ): + if tzinfo is None: + tzinfo = dateutil_tz.tzutc() + # detect that tzinfo is a pytz object (issue #626) + elif ( + isinstance(tzinfo, dt_tzinfo) + and hasattr(tzinfo, "localize") + and hasattr(tzinfo, "zone") + and tzinfo.zone + ): + tzinfo = parser.TzinfoParser.parse(tzinfo.zone) + elif util.isstr(tzinfo): + tzinfo = parser.TzinfoParser.parse(tzinfo) + + fold = kwargs.get("fold", 0) + + # use enfold here to cover direct arrow.Arrow init on 2.7/3.5 + 
self._datetime = dateutil_tz.enfold( + datetime(year, month, day, hour, minute, second, microsecond, tzinfo), + fold=fold, + ) + + # factories: single object, both original and from datetime. + + @classmethod + def now(cls, tzinfo=None): + """Constructs an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. + + Usage:: + + >>> arrow.now('Asia/Baku') + + + """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzlocal() + + dt = datetime.now(tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def utcnow(cls): + """Constructs an :class:`Arrow ` object, representing "now" in UTC + time. + + Usage:: + + >>> arrow.utcnow() + + + """ + + dt = datetime.now(dateutil_tz.tzutc()) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromtimestamp(cls, timestamp, tzinfo=None): + """Constructs an :class:`Arrow ` object from a timestamp, converted to + the given timezone. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
+ """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzlocal() + elif util.isstr(tzinfo): + tzinfo = parser.TzinfoParser.parse(tzinfo) + + if not util.is_timestamp(timestamp): + raise ValueError( + "The provided timestamp '{}' is invalid.".format(timestamp) + ) + + timestamp = util.normalize_timestamp(float(timestamp)) + dt = datetime.fromtimestamp(timestamp, tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def utcfromtimestamp(cls, timestamp): + """Constructs an :class:`Arrow ` object from a timestamp, in UTC time. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + + """ + + if not util.is_timestamp(timestamp): + raise ValueError( + "The provided timestamp '{}' is invalid.".format(timestamp) + ) + + timestamp = util.normalize_timestamp(float(timestamp)) + dt = datetime.utcfromtimestamp(timestamp) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dateutil_tz.tzutc(), + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromdatetime(cls, dt, tzinfo=None): + """Constructs an :class:`Arrow ` object from a ``datetime`` and + optional replacement timezone. + + :param dt: the ``datetime`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s + timezone, or UTC if naive. 
+ + If you only want to replace the timezone of naive datetimes:: + + >>> dt + datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) + >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') + + + """ + + if tzinfo is None: + if dt.tzinfo is None: + tzinfo = dateutil_tz.tzutc() + else: + tzinfo = dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromdate(cls, date, tzinfo=None): + """Constructs an :class:`Arrow ` object from a ``date`` and optional + replacement timezone. Time values are set to 0. + + :param date: the ``date`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. + """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzutc() + + return cls(date.year, date.month, date.day, tzinfo=tzinfo) + + @classmethod + def strptime(cls, date_str, fmt, tzinfo=None): + """Constructs an :class:`Arrow ` object from a date string and format, + in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. + + :param date_str: the date string. + :param fmt: the format string. + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed + timezone if ``fmt`` contains a timezone directive, otherwise UTC. + + Usage:: + + >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S') + + + """ + + dt = datetime.strptime(date_str, fmt) + if tzinfo is None: + tzinfo = dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + fold=getattr(dt, "fold", 0), + ) + + # factories: ranges and spans + + @classmethod + def range(cls, frame, start, end=None, tz=None, limit=None): + """Returns an iterator of :class:`Arrow ` objects, representing + points in time between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). 
+ :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. + + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Usage:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... + + + + + + + **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 13, 30) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... 
+ + + + """ + + _, frame_relative, relative_steps = cls._get_frames(frame) + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + + start = cls._get_datetime(start).replace(tzinfo=tzinfo) + end, limit = cls._get_iteration_params(end, limit) + end = cls._get_datetime(end).replace(tzinfo=tzinfo) + + current = cls.fromdatetime(start) + original_day = start.day + day_is_clipped = False + i = 0 + + while current <= end and i < limit: + i += 1 + yield current + + values = [getattr(current, f) for f in cls._ATTRS] + current = cls(*values, tzinfo=tzinfo).shift( + **{frame_relative: relative_steps} + ) + + if frame in ["month", "quarter", "year"] and current.day < original_day: + day_is_clipped = True + + if day_is_clipped and not cls._is_last_day_of_month(current): + current = current.replace(day=original_day) + + def span(self, frame, count=1, bounds="[)"): + """Returns two new :class:`Arrow ` objects, representing the timespan + of the :class:`Arrow ` object in a given timeframe. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param count: (optional) the number of frames to span. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the span. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. 
+ + Usage:: + + >>> arrow.utcnow() + + + >>> arrow.utcnow().span('hour') + (, ) + + >>> arrow.utcnow().span('day') + (, ) + + >>> arrow.utcnow().span('day', count=2) + (, ) + + >>> arrow.utcnow().span('day', bounds='[]') + (, ) + + """ + + util.validate_bounds(bounds) + + frame_absolute, frame_relative, relative_steps = self._get_frames(frame) + + if frame_absolute == "week": + attr = "day" + elif frame_absolute == "quarter": + attr = "month" + else: + attr = frame_absolute + + index = self._ATTRS.index(attr) + frames = self._ATTRS[: index + 1] + + values = [getattr(self, f) for f in frames] + + for _ in range(3 - len(values)): + values.append(1) + + floor = self.__class__(*values, tzinfo=self.tzinfo) + + if frame_absolute == "week": + floor = floor.shift(days=-(self.isoweekday() - 1)) + elif frame_absolute == "quarter": + floor = floor.shift(months=-((self.month - 1) % 3)) + + ceil = floor.shift(**{frame_relative: count * relative_steps}) + + if bounds[0] == "(": + floor = floor.shift(microseconds=+1) + + if bounds[1] == ")": + ceil = ceil.shift(microseconds=-1) + + return floor, ceil + + def floor(self, frame): + """Returns a new :class:`Arrow ` object, representing the "floor" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the first element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + + Usage:: + + >>> arrow.utcnow().floor('hour') + + """ + + return self.span(frame)[0] + + def ceil(self, frame): + """Returns a new :class:`Arrow ` object, representing the "ceiling" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the second element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). 
+ + Usage:: + + >>> arrow.utcnow().ceil('hour') + + """ + + return self.span(frame)[1] + + @classmethod + def span_range(cls, frame, start, end, tz=None, limit=None, bounds="[)"): + """Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of timespans between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in each span in the range. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. + + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned + iterator of timespans. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.span_range('hour', start, end): + ... print(r) + ... 
+ (, ) + (, ) + (, ) + (, ) + (, ) + (, ) + + """ + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + start = cls.fromdatetime(start, tzinfo).span(frame)[0] + _range = cls.range(frame, start, end, tz, limit) + return (r.span(frame, bounds=bounds) for r in _range) + + @classmethod + def interval(cls, frame, start, end, interval=1, tz=None, bounds="[)"): + """Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of intervals between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param interval: (optional) Time interval for the given time frame. + :param tz: (optional) A timezone expression. Defaults to UTC. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the intervals. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + Supported frame values: year, quarter, month, week, day, hour, minute, second + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO 8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.interval('hour', start, end, 2): + ... print r + ... 
+ (, ) + (, ) + (, ) + """ + if interval < 1: + raise ValueError("interval has to be a positive integer") + + spanRange = iter(cls.span_range(frame, start, end, tz, bounds=bounds)) + while True: + try: + intvlStart, intvlEnd = next(spanRange) + for _ in range(interval - 1): + _, intvlEnd = next(spanRange) + yield intvlStart, intvlEnd + except StopIteration: + return + + # representations + + def __repr__(self): + return "<{} [{}]>".format(self.__class__.__name__, self.__str__()) + + def __str__(self): + return self._datetime.isoformat() + + def __format__(self, formatstr): + + if len(formatstr) > 0: + return self.format(formatstr) + + return str(self) + + def __hash__(self): + return self._datetime.__hash__() + + # attributes and properties + + def __getattr__(self, name): + + if name == "week": + return self.isocalendar()[1] + + if name == "quarter": + return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1 + + if not name.startswith("_"): + value = getattr(self._datetime, name, None) + + if value is not None: + return value + + return object.__getattribute__(self, name) + + @property + def tzinfo(self): + """Gets the ``tzinfo`` of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.tzinfo + tzutc() + + """ + + return self._datetime.tzinfo + + @tzinfo.setter + def tzinfo(self, tzinfo): + """ Sets the ``tzinfo`` of the :class:`Arrow ` object. """ + + self._datetime = self._datetime.replace(tzinfo=tzinfo) + + @property + def datetime(self): + """Returns a datetime representation of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.datetime + datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc()) + + """ + + return self._datetime + + @property + def naive(self): + """Returns a naive datetime representation of the :class:`Arrow ` + object. 
+ + Usage:: + + >>> nairobi = arrow.now('Africa/Nairobi') + >>> nairobi + + >>> nairobi.naive + datetime.datetime(2019, 1, 23, 19, 27, 12, 297999) + + """ + + return self._datetime.replace(tzinfo=None) + + @property + def timestamp(self): + """Returns a timestamp representation of the :class:`Arrow ` object, in + UTC time. + + Usage:: + + >>> arrow.utcnow().timestamp + 1548260567 + + """ + + warnings.warn( + "For compatibility with the datetime.timestamp() method this property will be replaced with a method in " + "the 1.0.0 release, please switch to the .int_timestamp property for identical behaviour as soon as " + "possible.", + DeprecationWarning, + ) + return calendar.timegm(self._datetime.utctimetuple()) + + @property + def int_timestamp(self): + """Returns a timestamp representation of the :class:`Arrow ` object, in + UTC time. + + Usage:: + + >>> arrow.utcnow().int_timestamp + 1548260567 + + """ + + return calendar.timegm(self._datetime.utctimetuple()) + + @property + def float_timestamp(self): + """Returns a floating-point representation of the :class:`Arrow ` + object, in UTC time. + + Usage:: + + >>> arrow.utcnow().float_timestamp + 1548260516.830896 + + """ + + # IDEA get rid of this in 1.0.0 and wrap datetime.timestamp() + # Or for compatibility retain this but make it call the timestamp method + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return self.timestamp + float(self.microsecond) / 1000000 + + @property + def fold(self): + """ Returns the ``fold`` value of the :class:`Arrow ` object. 
""" + + # in python < 3.6 _datetime will be a _DatetimeWithFold if fold=1 and a datetime with no fold attribute + # otherwise, so we need to return zero to cover the latter case + return getattr(self._datetime, "fold", 0) + + @property + def ambiguous(self): + """ Returns a boolean indicating whether the :class:`Arrow ` object is ambiguous.""" + + return dateutil_tz.datetime_ambiguous(self._datetime) + + @property + def imaginary(self): + """Indicates whether the :class: `Arrow ` object exists in the current timezone.""" + + return not dateutil_tz.datetime_exists(self._datetime) + + # mutation and duplication. + + def clone(self): + """Returns a new :class:`Arrow ` object, cloned from the current one. + + Usage: + + >>> arw = arrow.utcnow() + >>> cloned = arw.clone() + + """ + + return self.fromdatetime(self._datetime) + + def replace(self, **kwargs): + """Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use property names to set their value absolutely:: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.replace(year=2014, month=6) + + + You can also replace the timezone without conversion, using a + :ref:`timezone expression `:: + + >>> arw.replace(tzinfo=tz.tzlocal()) + + + """ + + absolute_kwargs = {} + + for key, value in kwargs.items(): + + if key in self._ATTRS: + absolute_kwargs[key] = value + elif key in ["week", "quarter"]: + raise AttributeError("setting absolute {} is not supported".format(key)) + elif key not in ["tzinfo", "fold"]: + raise AttributeError('unknown attribute: "{}"'.format(key)) + + current = self._datetime.replace(**absolute_kwargs) + + tzinfo = kwargs.get("tzinfo") + + if tzinfo is not None: + tzinfo = self._get_tzinfo(tzinfo) + current = current.replace(tzinfo=tzinfo) + + fold = kwargs.get("fold") + + # TODO revisit this once we drop support for 2.7/3.5 + if fold is not None: + current = dateutil_tz.enfold(current, fold=fold) + + return self.fromdatetime(current) + + def 
shift(self, **kwargs): + """Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use pluralized property names to relatively shift their current value: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.shift(years=1, months=-1) + + + Day-of-the-week relative shifting can use either Python's weekday numbers + (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's + day instances (MO, TU .. SU). When using weekday numbers, the returned + date will always be greater than or equal to the starting date. + + Using the above code (which is a Saturday) and asking it to shift to Saturday: + + >>> arw.shift(weekday=5) + + + While asking for a Monday: + + >>> arw.shift(weekday=0) + + + """ + + relative_kwargs = {} + additional_attrs = ["weeks", "quarters", "weekday"] + + for key, value in kwargs.items(): + + if key in self._ATTRS_PLURAL or key in additional_attrs: + relative_kwargs[key] = value + else: + raise AttributeError( + "Invalid shift time frame. Please select one of the following: {}.".format( + ", ".join(self._ATTRS_PLURAL + additional_attrs) + ) + ) + + # core datetime does not support quarters, translate to months. + relative_kwargs.setdefault("months", 0) + relative_kwargs["months"] += ( + relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER + ) + + current = self._datetime + relativedelta(**relative_kwargs) + + if not dateutil_tz.datetime_exists(current): + current = dateutil_tz.resolve_imaginary(current) + + return self.fromdatetime(current) + + def to(self, tz): + """Returns a new :class:`Arrow ` object, converted + to the target timezone. + + :param tz: A :ref:`timezone expression `. 
+ + Usage:: + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc.to('US/Pacific') + + + >>> utc.to(tz.tzlocal()) + + + >>> utc.to('-07:00') + + + >>> utc.to('local') + + + >>> utc.to('local').to('utc') + + + """ + + if not isinstance(tz, dt_tzinfo): + tz = parser.TzinfoParser.parse(tz) + + dt = self._datetime.astimezone(tz) + + return self.__class__( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + # string output and formatting + + def format(self, fmt="YYYY-MM-DD HH:mm:ssZZ", locale="en_us"): + """Returns a string representation of the :class:`Arrow ` object, + formatted according to a format string. + + :param fmt: the format string. + + Usage:: + + >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-09 03:56:47 -00:00' + + >>> arrow.utcnow().format('X') + '1368071882' + + >>> arrow.utcnow().format('MMMM DD, YYYY') + 'May 09, 2013' + + >>> arrow.utcnow().format() + '2013-05-09 03:56:47 -00:00' + + """ + + return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) + + def humanize( + self, other=None, locale="en_us", only_distance=False, granularity="auto" + ): + """Returns a localized, humanized representation of a relative difference in time. + + :param other: (optional) an :class:`Arrow ` or ``datetime`` object. + Defaults to now in the current :class:`Arrow ` object's timezone. + :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. + :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. + :param granularity: (optional) defines the precision of the output. 
Set it to strings 'second', 'minute', + 'hour', 'day', 'week', 'month' or 'year' or a list of any combination of these strings + + Usage:: + + >>> earlier = arrow.utcnow().shift(hours=-2) + >>> earlier.humanize() + '2 hours ago' + + >>> later = earlier.shift(hours=4) + >>> later.humanize(earlier) + 'in 4 hours' + + """ + + locale_name = locale + locale = locales.get_locale(locale) + + if other is None: + utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) + dt = utc.astimezone(self._datetime.tzinfo) + + elif isinstance(other, Arrow): + dt = other._datetime + + elif isinstance(other, datetime): + if other.tzinfo is None: + dt = other.replace(tzinfo=self._datetime.tzinfo) + else: + dt = other.astimezone(self._datetime.tzinfo) + + else: + raise TypeError( + "Invalid 'other' argument of type '{}'. " + "Argument must be of type None, Arrow, or datetime.".format( + type(other).__name__ + ) + ) + + if isinstance(granularity, list) and len(granularity) == 1: + granularity = granularity[0] + + delta = int(round(util.total_seconds(self._datetime - dt))) + sign = -1 if delta < 0 else 1 + diff = abs(delta) + delta = diff + + try: + if granularity == "auto": + if diff < 10: + return locale.describe("now", only_distance=only_distance) + + if diff < 45: + seconds = sign * delta + return locale.describe( + "seconds", seconds, only_distance=only_distance + ) + + elif diff < 90: + return locale.describe("minute", sign, only_distance=only_distance) + elif diff < 2700: + minutes = sign * int(max(delta / 60, 2)) + return locale.describe( + "minutes", minutes, only_distance=only_distance + ) + + elif diff < 5400: + return locale.describe("hour", sign, only_distance=only_distance) + elif diff < 79200: + hours = sign * int(max(delta / 3600, 2)) + return locale.describe("hours", hours, only_distance=only_distance) + + # anything less than 48 hours should be 1 day + elif diff < 172800: + return locale.describe("day", sign, only_distance=only_distance) + elif diff < 554400: + days = 
sign * int(max(delta / 86400, 2)) + return locale.describe("days", days, only_distance=only_distance) + + elif diff < 907200: + return locale.describe("week", sign, only_distance=only_distance) + elif diff < 2419200: + weeks = sign * int(max(delta / 604800, 2)) + return locale.describe("weeks", weeks, only_distance=only_distance) + + elif diff < 3888000: + return locale.describe("month", sign, only_distance=only_distance) + elif diff < 29808000: + self_months = self._datetime.year * 12 + self._datetime.month + other_months = dt.year * 12 + dt.month + + months = sign * int(max(abs(other_months - self_months), 2)) + + return locale.describe( + "months", months, only_distance=only_distance + ) + + elif diff < 47260800: + return locale.describe("year", sign, only_distance=only_distance) + else: + years = sign * int(max(delta / 31536000, 2)) + return locale.describe("years", years, only_distance=only_distance) + + elif util.isstr(granularity): + if granularity == "second": + delta = sign * delta + if abs(delta) < 2: + return locale.describe("now", only_distance=only_distance) + elif granularity == "minute": + delta = sign * delta / self._SECS_PER_MINUTE + elif granularity == "hour": + delta = sign * delta / self._SECS_PER_HOUR + elif granularity == "day": + delta = sign * delta / self._SECS_PER_DAY + elif granularity == "week": + delta = sign * delta / self._SECS_PER_WEEK + elif granularity == "month": + delta = sign * delta / self._SECS_PER_MONTH + elif granularity == "year": + delta = sign * delta / self._SECS_PER_YEAR + else: + raise AttributeError( + "Invalid level of granularity. 
Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'" + ) + + if trunc(abs(delta)) != 1: + granularity += "s" + return locale.describe(granularity, delta, only_distance=only_distance) + + else: + timeframes = [] + if "year" in granularity: + years = sign * delta / self._SECS_PER_YEAR + delta %= self._SECS_PER_YEAR + timeframes.append(["year", years]) + + if "month" in granularity: + months = sign * delta / self._SECS_PER_MONTH + delta %= self._SECS_PER_MONTH + timeframes.append(["month", months]) + + if "week" in granularity: + weeks = sign * delta / self._SECS_PER_WEEK + delta %= self._SECS_PER_WEEK + timeframes.append(["week", weeks]) + + if "day" in granularity: + days = sign * delta / self._SECS_PER_DAY + delta %= self._SECS_PER_DAY + timeframes.append(["day", days]) + + if "hour" in granularity: + hours = sign * delta / self._SECS_PER_HOUR + delta %= self._SECS_PER_HOUR + timeframes.append(["hour", hours]) + + if "minute" in granularity: + minutes = sign * delta / self._SECS_PER_MINUTE + delta %= self._SECS_PER_MINUTE + timeframes.append(["minute", minutes]) + + if "second" in granularity: + seconds = sign * delta + timeframes.append(["second", seconds]) + + if len(timeframes) < len(granularity): + raise AttributeError( + "Invalid level of granularity. " + "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'." + ) + + for tf in timeframes: + # Make granularity plural if the delta is not equal to 1 + if trunc(abs(tf[1])) != 1: + tf[0] += "s" + return locale.describe_multi(timeframes, only_distance=only_distance) + + except KeyError as e: + raise ValueError( + "Humanization of the {} granularity is not currently translated in the '{}' locale. 
" + "Please consider making a contribution to this locale.".format( + e, locale_name + ) + ) + + # query functions + + def is_between(self, start, end, bounds="()"): + """Returns a boolean denoting whether the specified date and time is between + the start and end dates and times. + + :param start: an :class:`Arrow ` object. + :param end: an :class:`Arrow ` object. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the range. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '()' is used. + + Usage:: + + >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10)) + >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36)) + >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end) + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]') + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)') + False + + """ + + util.validate_bounds(bounds) + + if not isinstance(start, Arrow): + raise TypeError( + "Can't parse start date argument type of '{}'".format(type(start)) + ) + + if not isinstance(end, Arrow): + raise TypeError( + "Can't parse end date argument type of '{}'".format(type(end)) + ) + + include_start = bounds[0] == "[" + include_end = bounds[1] == "]" + + target_timestamp = self.float_timestamp + start_timestamp = start.float_timestamp + end_timestamp = end.float_timestamp + + if include_start and include_end: + return ( + target_timestamp >= start_timestamp + and target_timestamp <= end_timestamp + ) + elif include_start and not include_end: + return ( + target_timestamp >= start_timestamp and target_timestamp < end_timestamp + ) + elif not 
include_start and include_end: + return ( + target_timestamp > start_timestamp and target_timestamp <= end_timestamp + ) + else: + return ( + target_timestamp > start_timestamp and target_timestamp < end_timestamp + ) + + # datetime methods + + def date(self): + """Returns a ``date`` object with the same year, month and day. + + Usage:: + + >>> arrow.utcnow().date() + datetime.date(2019, 1, 23) + + """ + + return self._datetime.date() + + def time(self): + """Returns a ``time`` object with the same hour, minute, second, microsecond. + + Usage:: + + >>> arrow.utcnow().time() + datetime.time(12, 15, 34, 68352) + + """ + + return self._datetime.time() + + def timetz(self): + """Returns a ``time`` object with the same hour, minute, second, microsecond and + tzinfo. + + Usage:: + + >>> arrow.utcnow().timetz() + datetime.time(12, 5, 18, 298893, tzinfo=tzutc()) + + """ + + return self._datetime.timetz() + + def astimezone(self, tz): + """Returns a ``datetime`` object, converted to the specified timezone. + + :param tz: a ``tzinfo`` object. + + Usage:: + + >>> pacific=arrow.now('US/Pacific') + >>> nyc=arrow.now('America/New_York').tzinfo + >>> pacific.astimezone(nyc) + datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York')) + + """ + + return self._datetime.astimezone(tz) + + def utcoffset(self): + """Returns a ``timedelta`` object representing the whole number of minutes difference from + UTC time. + + Usage:: + + >>> arrow.now('US/Pacific').utcoffset() + datetime.timedelta(-1, 57600) + + """ + + return self._datetime.utcoffset() + + def dst(self): + """Returns the daylight savings time adjustment. + + Usage:: + + >>> arrow.utcnow().dst() + datetime.timedelta(0) + + """ + + return self._datetime.dst() + + def timetuple(self): + """Returns a ``time.struct_time``, in the current timezone. 
+ + Usage:: + + >>> arrow.utcnow().timetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0) + + """ + + return self._datetime.timetuple() + + def utctimetuple(self): + """Returns a ``time.struct_time``, in UTC time. + + Usage:: + + >>> arrow.utcnow().utctimetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0) + + """ + + return self._datetime.utctimetuple() + + def toordinal(self): + """Returns the proleptic Gregorian ordinal of the date. + + Usage:: + + >>> arrow.utcnow().toordinal() + 737078 + + """ + + return self._datetime.toordinal() + + def weekday(self): + """Returns the day of the week as an integer (0-6). + + Usage:: + + >>> arrow.utcnow().weekday() + 5 + + """ + + return self._datetime.weekday() + + def isoweekday(self): + """Returns the ISO day of the week as an integer (1-7). + + Usage:: + + >>> arrow.utcnow().isoweekday() + 6 + + """ + + return self._datetime.isoweekday() + + def isocalendar(self): + """Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). + + Usage:: + + >>> arrow.utcnow().isocalendar() + (2019, 3, 6) + + """ + + return self._datetime.isocalendar() + + def isoformat(self, sep="T"): + """Returns an ISO 8601 formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().isoformat() + '2019-01-19T18:30:52.442118+00:00' + + """ + + return self._datetime.isoformat(sep) + + def ctime(self): + """Returns a ctime formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().ctime() + 'Sat Jan 19 18:26:50 2019' + + """ + + return self._datetime.ctime() + + def strftime(self, format): + """Formats in the style of ``datetime.strftime``. + + :param format: the format string. 
+ + Usage:: + + >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S') + '23-01-2019 12:28:17' + + """ + + return self._datetime.strftime(format) + + def for_json(self): + """Serializes for the ``for_json`` protocol of simplejson. + + Usage:: + + >>> arrow.utcnow().for_json() + '2019-01-19T18:25:36.760079+00:00' + + """ + + return self.isoformat() + + # math + + def __add__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) + + return NotImplemented + + def __radd__(self, other): + return self.__add__(other) + + def __sub__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) + + elif isinstance(other, datetime): + return self._datetime - other + + elif isinstance(other, Arrow): + return self._datetime - other._datetime + + return NotImplemented + + def __rsub__(self, other): + + if isinstance(other, datetime): + return other - self._datetime + + return NotImplemented + + # comparisons + + def __eq__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return False + + return self._datetime == self._get_datetime(other) + + def __ne__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return True + + return not self.__eq__(other) + + def __gt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime > self._get_datetime(other) + + def __ge__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime >= self._get_datetime(other) + + def __lt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime < self._get_datetime(other) + + def __le__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime <= self._get_datetime(other) + + def __cmp__(self, 
other): + if sys.version_info[0] < 3: # pragma: no cover + if not isinstance(other, (Arrow, datetime)): + raise TypeError( + "can't compare '{}' to '{}'".format(type(self), type(other)) + ) + + # internal methods + + @staticmethod + def _get_tzinfo(tz_expr): + + if tz_expr is None: + return dateutil_tz.tzutc() + if isinstance(tz_expr, dt_tzinfo): + return tz_expr + else: + try: + return parser.TzinfoParser.parse(tz_expr) + except parser.ParserError: + raise ValueError("'{}' not recognized as a timezone".format(tz_expr)) + + @classmethod + def _get_datetime(cls, expr): + """Get datetime object for a specified expression.""" + if isinstance(expr, Arrow): + return expr.datetime + elif isinstance(expr, datetime): + return expr + elif util.is_timestamp(expr): + timestamp = float(expr) + return cls.utcfromtimestamp(timestamp).datetime + else: + raise ValueError( + "'{}' not recognized as a datetime or timestamp.".format(expr) + ) + + @classmethod + def _get_frames(cls, name): + + if name in cls._ATTRS: + return name, "{}s".format(name), 1 + elif name[-1] == "s" and name[:-1] in cls._ATTRS: + return name[:-1], name, 1 + elif name in ["week", "weeks"]: + return "week", "weeks", 1 + elif name in ["quarter", "quarters"]: + return "quarter", "months", 3 + + supported = ", ".join( + [ + "year(s)", + "month(s)", + "day(s)", + "hour(s)", + "minute(s)", + "second(s)", + "microsecond(s)", + "week(s)", + "quarter(s)", + ] + ) + raise AttributeError( + "range/span over frame {} not supported. 
Supported frames: {}".format( + name, supported + ) + ) + + @classmethod + def _get_iteration_params(cls, end, limit): + + if end is None: + + if limit is None: + raise ValueError("one of 'end' or 'limit' is required") + + return cls.max, limit + + else: + if limit is None: + return end, sys.maxsize + return end, limit + + @staticmethod + def _is_last_day_of_month(date): + return date.day == calendar.monthrange(date.year, date.month)[1] + + +Arrow.min = Arrow.fromdatetime(datetime.min) +Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py new file mode 100644 index 00000000000..81e37b26de6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- + +# Output of time.mktime(datetime.max.timetuple()) on macOS +# This value must be hardcoded for compatibility with Windows +# Platform-independent max timestamps are hard to form +# https://stackoverflow.com/q/46133223 +MAX_TIMESTAMP = 253402318799.0 +MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000 +MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py new file mode 100644 index 00000000000..05933e81518 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +""" +Implements the :class:`ArrowFactory ` class, +providing factory methods for common :class:`Arrow ` +construction scenarios. 
+ +""" + +from __future__ import absolute_import + +import calendar +from datetime import date, datetime +from datetime import tzinfo as dt_tzinfo +from time import struct_time + +from dateutil import tz as dateutil_tz + +from arrow import parser +from arrow.arrow import Arrow +from arrow.util import is_timestamp, iso_to_gregorian, isstr + + +class ArrowFactory(object): + """A factory for generating :class:`Arrow ` objects. + + :param type: (optional) the :class:`Arrow `-based class to construct from. + Defaults to :class:`Arrow `. + + """ + + def __init__(self, type=Arrow): + self.type = type + + def get(self, *args, **kwargs): + """Returns an :class:`Arrow ` object based on flexible inputs. + + :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'. + :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. + Replaces the timezone unless using an input form that is explicitly UTC or specifies + the timezone in a positional argument. Defaults to UTC. + :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize + redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing. + Defaults to false. + + Usage:: + + >>> import arrow + + **No inputs** to get current UTC time:: + + >>> arrow.get() + + + **None** to also get current UTC time:: + + >>> arrow.get(None) + + + **One** :class:`Arrow ` object, to get a copy. 
+ + >>> arw = arrow.utcnow() + >>> arrow.get(arw) + + + **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get + that timestamp in UTC:: + + >>> arrow.get(1367992474.293378) + + + >>> arrow.get(1367992474) + + + **One** ISO 8601-formatted ``str``, to parse it:: + + >>> arrow.get('2013-09-29T01:26:43.830580') + + + **One** ISO 8601-formatted ``str``, in basic format, to parse it:: + + >>> arrow.get('20160413T133656.456289') + + + **One** ``tzinfo``, to get the current time **converted** to that timezone:: + + >>> arrow.get(tz.tzlocal()) + + + **One** naive ``datetime``, to get that datetime in UTC:: + + >>> arrow.get(datetime(2013, 5, 5)) + + + **One** aware ``datetime``, to get that datetime:: + + >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) + + + **One** naive ``date``, to get that date in UTC:: + + >>> arrow.get(date(2013, 5, 5)) + + + **One** time.struct time:: + + >>> arrow.get(gmtime(0)) + + + **One** iso calendar ``tuple``, to get that week date in UTC:: + + >>> arrow.get((2013, 18, 7)) + + + **Two** arguments, a naive or aware ``datetime``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, a naive ``date``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(date(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, both ``str``, to parse the first according to the format of the second:: + + >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ') + + + **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: + + >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) + + + **Three or more** arguments, as for the constructor of a ``datetime``:: + + >>> arrow.get(2013, 5, 5, 12, 30, 45) + + + """ + + arg_count = len(args) + locale = kwargs.pop("locale", "en_us") + tz = kwargs.get("tzinfo", None) + normalize_whitespace = kwargs.pop("normalize_whitespace", False) 
+ + # if kwargs given, send to constructor unless only tzinfo provided + if len(kwargs) > 1: + arg_count = 3 + + # tzinfo kwarg is not provided + if len(kwargs) == 1 and tz is None: + arg_count = 3 + + # () -> now, @ utc. + if arg_count == 0: + if isstr(tz): + tz = parser.TzinfoParser.parse(tz) + return self.type.now(tz) + + if isinstance(tz, dt_tzinfo): + return self.type.now(tz) + + return self.type.utcnow() + + if arg_count == 1: + arg = args[0] + + # (None) -> now, @ utc. + if arg is None: + return self.type.utcnow() + + # try (int, float) -> from timestamp with tz + elif not isstr(arg) and is_timestamp(arg): + if tz is None: + # set to UTC by default + tz = dateutil_tz.tzutc() + return self.type.fromtimestamp(arg, tzinfo=tz) + + # (Arrow) -> from the object's datetime. + elif isinstance(arg, Arrow): + return self.type.fromdatetime(arg.datetime) + + # (datetime) -> from datetime. + elif isinstance(arg, datetime): + return self.type.fromdatetime(arg) + + # (date) -> from date. + elif isinstance(arg, date): + return self.type.fromdate(arg) + + # (tzinfo) -> now, @ tzinfo. + elif isinstance(arg, dt_tzinfo): + return self.type.now(arg) + + # (str) -> parse. + elif isstr(arg): + dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace) + return self.type.fromdatetime(dt, tz) + + # (struct_time) -> from struct_time + elif isinstance(arg, struct_time): + return self.type.utcfromtimestamp(calendar.timegm(arg)) + + # (iso calendar) -> convert then from date + elif isinstance(arg, tuple) and len(arg) == 3: + dt = iso_to_gregorian(*arg) + return self.type.fromdate(dt) + + else: + raise TypeError( + "Can't parse single argument of type '{}'".format(type(arg)) + ) + + elif arg_count == 2: + + arg_1, arg_2 = args[0], args[1] + + if isinstance(arg_1, datetime): + + # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. 
+ if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): + return self.type.fromdatetime(arg_1, arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'datetime', '{}'".format( + type(arg_2) + ) + ) + + elif isinstance(arg_1, date): + + # (date, tzinfo/str) -> fromdate replace tzinfo. + if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): + return self.type.fromdate(arg_1, tzinfo=arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'date', '{}'".format( + type(arg_2) + ) + ) + + # (str, format) -> parse. + elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): + dt = parser.DateTimeParser(locale).parse( + args[0], args[1], normalize_whitespace + ) + return self.type.fromdatetime(dt, tzinfo=tz) + + else: + raise TypeError( + "Can't parse two arguments of types '{}' and '{}'".format( + type(arg_1), type(arg_2) + ) + ) + + # 3+ args -> datetime-like via constructor. + else: + return self.type(*args, **kwargs) + + def utcnow(self): + """Returns an :class:`Arrow ` object, representing "now" in UTC time. + + Usage:: + + >>> import arrow + >>> arrow.utcnow() + + """ + + return self.type.utcnow() + + def now(self, tz=None): + """Returns an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
+ + Usage:: + + >>> import arrow + >>> arrow.now() + + + >>> arrow.now('US/Pacific') + + + >>> arrow.now('+02:00') + + + >>> arrow.now('local') + + """ + + if tz is None: + tz = dateutil_tz.tzlocal() + elif not isinstance(tz, dt_tzinfo): + tz = parser.TzinfoParser.parse(tz) + + return self.type.now(tz) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py new file mode 100644 index 00000000000..9f9d7a44da7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, division + +import calendar +import re + +from dateutil import tz as dateutil_tz + +from arrow import locales, util + +FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ" +FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ" +FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z" +FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ" +FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z" +FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ" +FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ" + + +class DateTimeFormatter(object): + + # This pattern matches characters enclosed in square brackets are matched as + # an atomic group. 
For more info on atomic groups and how to they are + # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578 + + _FORMAT_RE = re.compile( + r"(\[(?:(?=(?P[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)" + ) + + def __init__(self, locale="en_us"): + + self.locale = locales.get_locale(locale) + + def format(cls, dt, fmt): + + return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) + + def _format_token(self, dt, token): + + if token and token.startswith("[") and token.endswith("]"): + return token[1:-1] + + if token == "YYYY": + return self.locale.year_full(dt.year) + if token == "YY": + return self.locale.year_abbreviation(dt.year) + + if token == "MMMM": + return self.locale.month_name(dt.month) + if token == "MMM": + return self.locale.month_abbreviation(dt.month) + if token == "MM": + return "{:02d}".format(dt.month) + if token == "M": + return str(dt.month) + + if token == "DDDD": + return "{:03d}".format(dt.timetuple().tm_yday) + if token == "DDD": + return str(dt.timetuple().tm_yday) + if token == "DD": + return "{:02d}".format(dt.day) + if token == "D": + return str(dt.day) + + if token == "Do": + return self.locale.ordinal_number(dt.day) + + if token == "dddd": + return self.locale.day_name(dt.isoweekday()) + if token == "ddd": + return self.locale.day_abbreviation(dt.isoweekday()) + if token == "d": + return str(dt.isoweekday()) + + if token == "HH": + return "{:02d}".format(dt.hour) + if token == "H": + return str(dt.hour) + if token == "hh": + return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + if token == "h": + return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + + if token == "mm": + return "{:02d}".format(dt.minute) + if token == "m": + return str(dt.minute) + + if token == "ss": + return "{:02d}".format(dt.second) + if token == "s": + return str(dt.second) + + if token == "SSSSSS": + return 
str("{:06d}".format(int(dt.microsecond))) + if token == "SSSSS": + return str("{:05d}".format(int(dt.microsecond / 10))) + if token == "SSSS": + return str("{:04d}".format(int(dt.microsecond / 100))) + if token == "SSS": + return str("{:03d}".format(int(dt.microsecond / 1000))) + if token == "SS": + return str("{:02d}".format(int(dt.microsecond / 10000))) + if token == "S": + return str(int(dt.microsecond / 100000)) + + if token == "X": + # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 + return str(calendar.timegm(dt.utctimetuple())) + + if token == "x": + # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 + ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000) + return str(int(ts * 1000000)) + + if token == "ZZZ": + return dt.tzname() + + if token in ["ZZ", "Z"]: + separator = ":" if token == "ZZ" else "" + tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo + total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) + + sign = "+" if total_minutes >= 0 else "-" + total_minutes = abs(total_minutes) + hour, minute = divmod(total_minutes, 60) + + return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute) + + if token in ("a", "A"): + return self.locale.meridian(dt.hour, token) + + if token == "W": + year, week, day = dt.isocalendar() + return "{}-W{:02d}-{}".format(year, week, day) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py new file mode 100644 index 00000000000..6833da5a781 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py @@ -0,0 +1,4267 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import inspect +import sys +from math import trunc + + +def get_locale(name): + """Returns an appropriate :class:`Locale ` + corresponding to an inpute locale name. 
+ + :param name: the name of the locale. + + """ + + locale_cls = _locales.get(name.lower()) + + if locale_cls is None: + raise ValueError("Unsupported locale '{}'".format(name)) + + return locale_cls() + + +def get_locale_by_class_name(name): + """Returns an appropriate :class:`Locale ` + corresponding to an locale class name. + + :param name: the name of the locale class. + + """ + locale_cls = globals().get(name) + + if locale_cls is None: + raise ValueError("Unsupported locale '{}'".format(name)) + + return locale_cls() + + +# base locale type. + + +class Locale(object): + """ Represents locale-specific data and functionality. """ + + names = [] + + timeframes = { + "now": "", + "second": "", + "seconds": "", + "minute": "", + "minutes": "", + "hour": "", + "hours": "", + "day": "", + "days": "", + "week": "", + "weeks": "", + "month": "", + "months": "", + "year": "", + "years": "", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + past = None + future = None + and_word = None + + month_names = [] + month_abbreviations = [] + + day_names = [] + day_abbreviations = [] + + ordinal_day_re = r"(\d+)" + + def __init__(self): + + self._month_name_to_ordinal = None + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. + :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = self._format_timeframe(timeframe, delta) + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + def describe_multi(self, timeframes, only_distance=False): + """Describes a delta within multiple timeframes in plain language. + + :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
+ :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords + """ + + humanized = "" + for index, (timeframe, delta) in enumerate(timeframes): + humanized += self._format_timeframe(timeframe, delta) + if index == len(timeframes) - 2 and self.and_word: + humanized += " " + self.and_word + " " + elif index < len(timeframes) - 1: + humanized += " " + + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + def day_name(self, day): + """Returns the day name for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_names[day] + + def day_abbreviation(self, day): + """Returns the day abbreviation for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_abbreviations[day] + + def month_name(self, month): + """Returns the month name for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_names[month] + + def month_abbreviation(self, month): + """Returns the month abbreviation for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_abbreviations[month] + + def month_number(self, name): + """Returns the month number for a month specified by name or abbreviation. + + :param name: the month name or abbreviation. 
+ + """ + + if self._month_name_to_ordinal is None: + self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) + self._month_name_to_ordinal.update( + self._name_to_ordinal(self.month_abbreviations) + ) + + return self._month_name_to_ordinal.get(name) + + def year_full(self, year): + """Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year)[2:] + + def meridian(self, hour, token): + """Returns the meridian indicator for a specified hour and format token. + + :param hour: the ``int`` hour of the day. + :param token: the format token. + """ + + if token == "a": + return self.meridians["am"] if hour < 12 else self.meridians["pm"] + if token == "A": + return self.meridians["AM"] if hour < 12 else self.meridians["PM"] + + def ordinal_number(self, n): + """Returns the ordinal format of a given integer + + :param n: an integer + """ + return self._ordinal_number(n) + + def _ordinal_number(self, n): + return "{}".format(n) + + def _name_to_ordinal(self, lst): + return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) + + def _format_timeframe(self, timeframe, delta): + return self.timeframes[timeframe].format(trunc(abs(delta))) + + def _format_relative(self, humanized, timeframe, delta): + + if timeframe == "now": + return humanized + + direction = self.past if delta < 0 else self.future + + return direction.format(humanized) + + +# base locale type implementations. 
+ + +class EnglishLocale(Locale): + + names = [ + "en", + "en_us", + "en_gb", + "en_au", + "en_be", + "en_jp", + "en_za", + "en_ca", + "en_ph", + ] + + past = "{0} ago" + future = "in {0}" + and_word = "and" + + timeframes = { + "now": "just now", + "second": "a second", + "seconds": "{0} seconds", + "minute": "a minute", + "minutes": "{0} minutes", + "hour": "an hour", + "hours": "{0} hours", + "day": "a day", + "days": "{0} days", + "week": "a week", + "weeks": "{0} weeks", + "month": "a month", + "months": "{0} months", + "year": "a year", + "years": "{0} years", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + "Sunday", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + ordinal_day_re = r"((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))" + + def _ordinal_number(self, n): + if n % 100 not in (11, 12, 13): + remainder = abs(n) % 10 + if remainder == 1: + return "{}st".format(n) + elif remainder == 2: + return "{}nd".format(n) + elif remainder == 3: + return "{}rd".format(n) + return "{}th".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = super(EnglishLocale, self).describe(timeframe, delta, only_distance) + if only_distance and timeframe == "now": + humanized = "instantly" + + return humanized + + +class ItalianLocale(Locale): + names = ["it", "it_it"] + past = "{0} fa" + future = "tra {0}" + and_word = "e" + + timeframes = { + "now": "adesso", + "second": "un secondo", + "seconds": "{0} qualche secondo", + "minute": "un minuto", + "minutes": "{0} minuti", + "hour": "un'ora", + "hours": "{0} ore", + "day": "un giorno", + "days": "{0} giorni", + "week": "una settimana,", + "weeks": "{0} settimane", + "month": "un mese", + "months": "{0} mesi", + "year": "un anno", + "years": "{0} anni", + } + + month_names = [ + "", + "gennaio", + "febbraio", + "marzo", + "aprile", + "maggio", + "giugno", + "luglio", + "agosto", + "settembre", + "ottobre", + "novembre", + "dicembre", + ] + month_abbreviations = [ + "", + "gen", + "feb", + "mar", + "apr", + "mag", + "giu", + "lug", + "ago", + "set", + "ott", + "nov", + "dic", + ] + + day_names = [ + "", + "lunedì", + "martedì", + "mercoledì", + "giovedì", + "venerdì", + "sabato", + "domenica", + ] + day_abbreviations = ["", "lun", "mar", "mer", "gio", "ven", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class SpanishLocale(Locale): + names = ["es", "es_es"] + past = "hace {0}" + future = "en {0}" + and_word = "y" + + timeframes = { + "now": "ahora", + "second": "un segundo", + "seconds": "{0} segundos", + "minute": "un minuto", + "minutes": "{0} minutos", + "hour": "una hora", + "hours": "{0} horas", + "day": "un día", + "days": "{0} días", + "week": "una semana", + "weeks": "{0} semanas", + "month": "un mes", + "months": "{0} meses", + "year": "un año", + "years": "{0} años", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", 
+ "enero", + "febrero", + "marzo", + "abril", + "mayo", + "junio", + "julio", + "agosto", + "septiembre", + "octubre", + "noviembre", + "diciembre", + ] + month_abbreviations = [ + "", + "ene", + "feb", + "mar", + "abr", + "may", + "jun", + "jul", + "ago", + "sep", + "oct", + "nov", + "dic", + ] + + day_names = [ + "", + "lunes", + "martes", + "miércoles", + "jueves", + "viernes", + "sábado", + "domingo", + ] + day_abbreviations = ["", "lun", "mar", "mie", "jue", "vie", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class FrenchBaseLocale(Locale): + + past = "il y a {0}" + future = "dans {0}" + and_word = "et" + + timeframes = { + "now": "maintenant", + "second": "une seconde", + "seconds": "{0} quelques secondes", + "minute": "une minute", + "minutes": "{0} minutes", + "hour": "une heure", + "hours": "{0} heures", + "day": "un jour", + "days": "{0} jours", + "week": "une semaine", + "weeks": "{0} semaines", + "month": "un mois", + "months": "{0} mois", + "year": "un an", + "years": "{0} ans", + } + + month_names = [ + "", + "janvier", + "février", + "mars", + "avril", + "mai", + "juin", + "juillet", + "août", + "septembre", + "octobre", + "novembre", + "décembre", + ] + + day_names = [ + "", + "lundi", + "mardi", + "mercredi", + "jeudi", + "vendredi", + "samedi", + "dimanche", + ] + day_abbreviations = ["", "lun", "mar", "mer", "jeu", "ven", "sam", "dim"] + + ordinal_day_re = ( + r"((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)" + ) + + def _ordinal_number(self, n): + if abs(n) == 1: + return "{}er".format(n) + return "{}e".format(n) + + +class FrenchLocale(FrenchBaseLocale, Locale): + + names = ["fr", "fr_fr"] + + month_abbreviations = [ + "", + "janv", + "févr", + "mars", + "avr", + "mai", + "juin", + "juil", + "août", + "sept", + "oct", + "nov", + "déc", + ] + + +class FrenchCanadianLocale(FrenchBaseLocale, Locale): + + names = ["fr_ca"] + + month_abbreviations = [ 
+ "", + "janv", + "févr", + "mars", + "avr", + "mai", + "juin", + "juill", + "août", + "sept", + "oct", + "nov", + "déc", + ] + + +class GreekLocale(Locale): + + names = ["el", "el_gr"] + + past = "{0} πριν" + future = "σε {0}" + and_word = "και" + + timeframes = { + "now": "τώρα", + "second": "ένα δεύτερο", + "seconds": "{0} δευτερόλεπτα", + "minute": "ένα λεπτό", + "minutes": "{0} λεπτά", + "hour": "μία ώρα", + "hours": "{0} ώρες", + "day": "μία μέρα", + "days": "{0} μέρες", + "month": "ένα μήνα", + "months": "{0} μήνες", + "year": "ένα χρόνο", + "years": "{0} χρόνια", + } + + month_names = [ + "", + "Ιανουαρίου", + "Φεβρουαρίου", + "Μαρτίου", + "Απριλίου", + "Μαΐου", + "Ιουνίου", + "Ιουλίου", + "Αυγούστου", + "Σεπτεμβρίου", + "Οκτωβρίου", + "Νοεμβρίου", + "Δεκεμβρίου", + ] + month_abbreviations = [ + "", + "Ιαν", + "Φεβ", + "Μαρ", + "Απρ", + "Μαϊ", + "Ιον", + "Ιολ", + "Αυγ", + "Σεπ", + "Οκτ", + "Νοε", + "Δεκ", + ] + + day_names = [ + "", + "Δευτέρα", + "Τρίτη", + "Τετάρτη", + "Πέμπτη", + "Παρασκευή", + "Σάββατο", + "Κυριακή", + ] + day_abbreviations = ["", "Δευ", "Τρι", "Τετ", "Πεμ", "Παρ", "Σαβ", "Κυρ"] + + +class JapaneseLocale(Locale): + + names = ["ja", "ja_jp"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "現在", + "second": "二番目の", + "seconds": "{0}数秒", + "minute": "1分", + "minutes": "{0}分", + "hour": "1時間", + "hours": "{0}時間", + "day": "1日", + "days": "{0}日", + "week": "1週間", + "weeks": "{0}週間", + "month": "1ヶ月", + "months": "{0}ヶ月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "月曜日", "火曜日", "水曜日", "木曜日", "金曜日", "土曜日", "日曜日"] + day_abbreviations = ["", "月", "火", "水", "木", "金", "土", "日"] + + +class SwedishLocale(Locale): + + names = ["sv", "sv_se"] + + past = "för {0} 
sen" + future = "om {0}" + and_word = "och" + + timeframes = { + "now": "just nu", + "second": "en sekund", + "seconds": "{0} några sekunder", + "minute": "en minut", + "minutes": "{0} minuter", + "hour": "en timme", + "hours": "{0} timmar", + "day": "en dag", + "days": "{0} dagar", + "week": "en vecka", + "weeks": "{0} veckor", + "month": "en månad", + "months": "{0} månader", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januari", + "februari", + "mars", + "april", + "maj", + "juni", + "juli", + "augusti", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "måndag", + "tisdag", + "onsdag", + "torsdag", + "fredag", + "lördag", + "söndag", + ] + day_abbreviations = ["", "mån", "tis", "ons", "tor", "fre", "lör", "sön"] + + +class FinnishLocale(Locale): + + names = ["fi", "fi_fi"] + + # The finnish grammar is very complex, and its hard to convert + # 1-to-1 to something like English. 
+ + past = "{0} sitten" + future = "{0} kuluttua" + + timeframes = { + "now": ["juuri nyt", "juuri nyt"], + "second": ["sekunti", "sekunti"], + "seconds": ["{0} muutama sekunti", "{0} muutaman sekunnin"], + "minute": ["minuutti", "minuutin"], + "minutes": ["{0} minuuttia", "{0} minuutin"], + "hour": ["tunti", "tunnin"], + "hours": ["{0} tuntia", "{0} tunnin"], + "day": ["päivä", "päivä"], + "days": ["{0} päivää", "{0} päivän"], + "month": ["kuukausi", "kuukauden"], + "months": ["{0} kuukautta", "{0} kuukauden"], + "year": ["vuosi", "vuoden"], + "years": ["{0} vuotta", "{0} vuoden"], + } + + # Months and days are lowercase in Finnish + month_names = [ + "", + "tammikuu", + "helmikuu", + "maaliskuu", + "huhtikuu", + "toukokuu", + "kesäkuu", + "heinäkuu", + "elokuu", + "syyskuu", + "lokakuu", + "marraskuu", + "joulukuu", + ] + + month_abbreviations = [ + "", + "tammi", + "helmi", + "maalis", + "huhti", + "touko", + "kesä", + "heinä", + "elo", + "syys", + "loka", + "marras", + "joulu", + ] + + day_names = [ + "", + "maanantai", + "tiistai", + "keskiviikko", + "torstai", + "perjantai", + "lauantai", + "sunnuntai", + ] + + day_abbreviations = ["", "ma", "ti", "ke", "to", "pe", "la", "su"] + + def _format_timeframe(self, timeframe, delta): + return ( + self.timeframes[timeframe][0].format(abs(delta)), + self.timeframes[timeframe][1].format(abs(delta)), + ) + + def _format_relative(self, humanized, timeframe, delta): + if timeframe == "now": + return humanized[0] + + direction = self.past if delta < 0 else self.future + which = 0 if delta < 0 else 1 + + return direction.format(humanized[which]) + + def _ordinal_number(self, n): + return "{}.".format(n) + + +class ChineseCNLocale(Locale): + + names = ["zh", "zh_cn"] + + past = "{0}前" + future = "{0}后" + + timeframes = { + "now": "刚才", + "second": "一秒", + "seconds": "{0}秒", + "minute": "1分钟", + "minutes": "{0}分钟", + "hour": "1小时", + "hours": "{0}小时", + "day": "1天", + "days": "{0}天", + "week": "一周", + "weeks": "{0}周", + 
"month": "1个月", + "months": "{0}个月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "一月", + "二月", + "三月", + "四月", + "五月", + "六月", + "七月", + "八月", + "九月", + "十月", + "十一月", + "十二月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class ChineseTWLocale(Locale): + + names = ["zh_tw"] + + past = "{0}前" + future = "{0}後" + and_word = "和" + + timeframes = { + "now": "剛才", + "second": "1秒", + "seconds": "{0}秒", + "minute": "1分鐘", + "minutes": "{0}分鐘", + "hour": "1小時", + "hours": "{0}小時", + "day": "1天", + "days": "{0}天", + "week": "1週", + "weeks": "{0}週", + "month": "1個月", + "months": "{0}個月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "週一", "週二", "週三", "週四", "週五", "週六", "週日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class HongKongLocale(Locale): + + names = ["zh_hk"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "剛才", + "second": "1秒", + "seconds": "{0}秒", + "minute": "1分鐘", + "minutes": "{0}分鐘", + "hour": "1小時", + "hours": "{0}小時", + "day": "1天", + "days": "{0}天", + "week": "1星期", + "weeks": "{0}星期", + "month": "1個月", + "months": "{0}個月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] + 
day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class KoreanLocale(Locale): + + names = ["ko", "ko_kr"] + + past = "{0} 전" + future = "{0} 후" + + timeframes = { + "now": "지금", + "second": "1초", + "seconds": "{0}초", + "minute": "1분", + "minutes": "{0}분", + "hour": "한시간", + "hours": "{0}시간", + "day": "하루", + "days": "{0}일", + "week": "1주", + "weeks": "{0}주", + "month": "한달", + "months": "{0}개월", + "year": "1년", + "years": "{0}년", + } + + special_dayframes = { + -3: "그끄제", + -2: "그제", + -1: "어제", + 1: "내일", + 2: "모레", + 3: "글피", + 4: "그글피", + } + + special_yearframes = {-2: "제작년", -1: "작년", 1: "내년", 2: "내후년"} + + month_names = [ + "", + "1월", + "2월", + "3월", + "4월", + "5월", + "6월", + "7월", + "8월", + "9월", + "10월", + "11월", + "12월", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] + day_abbreviations = ["", "월", "화", "수", "목", "금", "토", "일"] + + def _ordinal_number(self, n): + ordinals = ["0", "첫", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉", "열"] + if n < len(ordinals): + return "{}번째".format(ordinals[n]) + return "{}번째".format(n) + + def _format_relative(self, humanized, timeframe, delta): + if timeframe in ("day", "days"): + special = self.special_dayframes.get(delta) + if special: + return special + elif timeframe in ("year", "years"): + special = self.special_yearframes.get(delta) + if special: + return special + + return super(KoreanLocale, self)._format_relative(humanized, timeframe, delta) + + +# derived locale types & implementations. 
+class DutchLocale(Locale): + + names = ["nl", "nl_nl"] + + past = "{0} geleden" + future = "over {0}" + + timeframes = { + "now": "nu", + "second": "een seconde", + "seconds": "{0} seconden", + "minute": "een minuut", + "minutes": "{0} minuten", + "hour": "een uur", + "hours": "{0} uur", + "day": "een dag", + "days": "{0} dagen", + "week": "een week", + "weeks": "{0} weken", + "month": "een maand", + "months": "{0} maanden", + "year": "een jaar", + "years": "{0} jaar", + } + + # In Dutch names of months and days are not starting with a capital letter + # like in the English language. + month_names = [ + "", + "januari", + "februari", + "maart", + "april", + "mei", + "juni", + "juli", + "augustus", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mrt", + "apr", + "mei", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "maandag", + "dinsdag", + "woensdag", + "donderdag", + "vrijdag", + "zaterdag", + "zondag", + ] + day_abbreviations = ["", "ma", "di", "wo", "do", "vr", "za", "zo"] + + +class SlavicBaseLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + form = self.timeframes[timeframe] + delta = abs(delta) + + if isinstance(form, list): + + if delta % 10 == 1 and delta % 100 != 11: + form = form[0] + elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[1] + else: + form = form[2] + + return form.format(delta) + + +class BelarusianLocale(SlavicBaseLocale): + + names = ["be", "be_by"] + + past = "{0} таму" + future = "праз {0}" + + timeframes = { + "now": "зараз", + "second": "секунду", + "seconds": "{0} некалькі секунд", + "minute": "хвіліну", + "minutes": ["{0} хвіліну", "{0} хвіліны", "{0} хвілін"], + "hour": "гадзіну", + "hours": ["{0} гадзіну", "{0} гадзіны", "{0} гадзін"], + "day": "дзень", + "days": ["{0} дзень", "{0} дні", "{0} дзён"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяцы", "{0} 
месяцаў"], + "year": "год", + "years": ["{0} год", "{0} гады", "{0} гадоў"], + } + + month_names = [ + "", + "студзеня", + "лютага", + "сакавіка", + "красавіка", + "траўня", + "чэрвеня", + "ліпеня", + "жніўня", + "верасня", + "кастрычніка", + "лістапада", + "снежня", + ] + month_abbreviations = [ + "", + "студ", + "лют", + "сак", + "крас", + "трав", + "чэрв", + "ліп", + "жнів", + "вер", + "каст", + "ліст", + "снеж", + ] + + day_names = [ + "", + "панядзелак", + "аўторак", + "серада", + "чацвер", + "пятніца", + "субота", + "нядзеля", + ] + day_abbreviations = ["", "пн", "ат", "ср", "чц", "пт", "сб", "нд"] + + +class PolishLocale(SlavicBaseLocale): + + names = ["pl", "pl_pl"] + + past = "{0} temu" + future = "za {0}" + + # The nouns should be in genitive case (Polish: "dopełniacz") + # in order to correctly form `past` & `future` expressions. + timeframes = { + "now": "teraz", + "second": "sekundę", + "seconds": ["{0} sekund", "{0} sekundy", "{0} sekund"], + "minute": "minutę", + "minutes": ["{0} minut", "{0} minuty", "{0} minut"], + "hour": "godzinę", + "hours": ["{0} godzin", "{0} godziny", "{0} godzin"], + "day": "dzień", + "days": "{0} dni", + "week": "tydzień", + "weeks": ["{0} tygodni", "{0} tygodnie", "{0} tygodni"], + "month": "miesiąc", + "months": ["{0} miesięcy", "{0} miesiące", "{0} miesięcy"], + "year": "rok", + "years": ["{0} lat", "{0} lata", "{0} lat"], + } + + month_names = [ + "", + "styczeń", + "luty", + "marzec", + "kwiecień", + "maj", + "czerwiec", + "lipiec", + "sierpień", + "wrzesień", + "październik", + "listopad", + "grudzień", + ] + month_abbreviations = [ + "", + "sty", + "lut", + "mar", + "kwi", + "maj", + "cze", + "lip", + "sie", + "wrz", + "paź", + "lis", + "gru", + ] + + day_names = [ + "", + "poniedziałek", + "wtorek", + "środa", + "czwartek", + "piątek", + "sobota", + "niedziela", + ] + day_abbreviations = ["", "Pn", "Wt", "Śr", "Czw", "Pt", "So", "Nd"] + + +class RussianLocale(SlavicBaseLocale): + + names = ["ru", "ru_ru"] + + past = 
"{0} назад" + future = "через {0}" + + timeframes = { + "now": "сейчас", + "second": "Второй", + "seconds": "{0} несколько секунд", + "minute": "минуту", + "minutes": ["{0} минуту", "{0} минуты", "{0} минут"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часов"], + "day": "день", + "days": ["{0} день", "{0} дня", "{0} дней"], + "week": "неделю", + "weeks": ["{0} неделю", "{0} недели", "{0} недель"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяца", "{0} месяцев"], + "year": "год", + "years": ["{0} год", "{0} года", "{0} лет"], + } + + month_names = [ + "", + "января", + "февраля", + "марта", + "апреля", + "мая", + "июня", + "июля", + "августа", + "сентября", + "октября", + "ноября", + "декабря", + ] + month_abbreviations = [ + "", + "янв", + "фев", + "мар", + "апр", + "май", + "июн", + "июл", + "авг", + "сен", + "окт", + "ноя", + "дек", + ] + + day_names = [ + "", + "понедельник", + "вторник", + "среда", + "четверг", + "пятница", + "суббота", + "воскресенье", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "вс"] + + +class AfrikaansLocale(Locale): + + names = ["af", "af_nl"] + + past = "{0} gelede" + future = "in {0}" + + timeframes = { + "now": "nou", + "second": "n sekonde", + "seconds": "{0} sekondes", + "minute": "minuut", + "minutes": "{0} minute", + "hour": "uur", + "hours": "{0} ure", + "day": "een dag", + "days": "{0} dae", + "month": "een maand", + "months": "{0} maande", + "year": "een jaar", + "years": "{0} jaar", + } + + month_names = [ + "", + "Januarie", + "Februarie", + "Maart", + "April", + "Mei", + "Junie", + "Julie", + "Augustus", + "September", + "Oktober", + "November", + "Desember", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mrt", + "Apr", + "Mei", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Des", + ] + + day_names = [ + "", + "Maandag", + "Dinsdag", + "Woensdag", + "Donderdag", + "Vrydag", + "Saterdag", + "Sondag", + ] + day_abbreviations = ["", "Ma", "Di", "Wo", "Do", "Vr", 
"Za", "So"] + + +class BulgarianLocale(SlavicBaseLocale): + + names = ["bg", "bg_BG"] + + past = "{0} назад" + future = "напред {0}" + + timeframes = { + "now": "сега", + "second": "секунда", + "seconds": "{0} няколко секунди", + "minute": "минута", + "minutes": ["{0} минута", "{0} минути", "{0} минути"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часа"], + "day": "ден", + "days": ["{0} ден", "{0} дни", "{0} дни"], + "month": "месец", + "months": ["{0} месец", "{0} месеца", "{0} месеца"], + "year": "година", + "years": ["{0} година", "{0} години", "{0} години"], + } + + month_names = [ + "", + "януари", + "февруари", + "март", + "април", + "май", + "юни", + "юли", + "август", + "септември", + "октомври", + "ноември", + "декември", + ] + month_abbreviations = [ + "", + "ян", + "февр", + "март", + "апр", + "май", + "юни", + "юли", + "авг", + "септ", + "окт", + "ноем", + "дек", + ] + + day_names = [ + "", + "понеделник", + "вторник", + "сряда", + "четвъртък", + "петък", + "събота", + "неделя", + ] + day_abbreviations = ["", "пон", "вт", "ср", "четв", "пет", "съб", "нед"] + + +class UkrainianLocale(SlavicBaseLocale): + + names = ["ua", "uk_ua"] + + past = "{0} тому" + future = "за {0}" + + timeframes = { + "now": "зараз", + "second": "секунда", + "seconds": "{0} кілька секунд", + "minute": "хвилину", + "minutes": ["{0} хвилину", "{0} хвилини", "{0} хвилин"], + "hour": "годину", + "hours": ["{0} годину", "{0} години", "{0} годин"], + "day": "день", + "days": ["{0} день", "{0} дні", "{0} днів"], + "month": "місяць", + "months": ["{0} місяць", "{0} місяці", "{0} місяців"], + "year": "рік", + "years": ["{0} рік", "{0} роки", "{0} років"], + } + + month_names = [ + "", + "січня", + "лютого", + "березня", + "квітня", + "травня", + "червня", + "липня", + "серпня", + "вересня", + "жовтня", + "листопада", + "грудня", + ] + month_abbreviations = [ + "", + "січ", + "лют", + "бер", + "квіт", + "трав", + "черв", + "лип", + "серп", + "вер", + "жовт", + "лист", + 
"груд", + ] + + day_names = [ + "", + "понеділок", + "вівторок", + "середа", + "четвер", + "п’ятниця", + "субота", + "неділя", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "нд"] + + +class MacedonianLocale(SlavicBaseLocale): + names = ["mk", "mk_mk"] + + past = "пред {0}" + future = "за {0}" + + timeframes = { + "now": "сега", + "second": "една секунда", + "seconds": ["{0} секунда", "{0} секунди", "{0} секунди"], + "minute": "една минута", + "minutes": ["{0} минута", "{0} минути", "{0} минути"], + "hour": "еден саат", + "hours": ["{0} саат", "{0} саати", "{0} саати"], + "day": "еден ден", + "days": ["{0} ден", "{0} дена", "{0} дена"], + "week": "една недела", + "weeks": ["{0} недела", "{0} недели", "{0} недели"], + "month": "еден месец", + "months": ["{0} месец", "{0} месеци", "{0} месеци"], + "year": "една година", + "years": ["{0} година", "{0} години", "{0} години"], + } + + meridians = {"am": "дп", "pm": "пп", "AM": "претпладне", "PM": "попладне"} + + month_names = [ + "", + "Јануари", + "Февруари", + "Март", + "Април", + "Мај", + "Јуни", + "Јули", + "Август", + "Септември", + "Октомври", + "Ноември", + "Декември", + ] + month_abbreviations = [ + "", + "Јан", + "Фев", + "Мар", + "Апр", + "Мај", + "Јун", + "Јул", + "Авг", + "Септ", + "Окт", + "Ноем", + "Декем", + ] + + day_names = [ + "", + "Понеделник", + "Вторник", + "Среда", + "Четврток", + "Петок", + "Сабота", + "Недела", + ] + day_abbreviations = [ + "", + "Пон", + "Вт", + "Сре", + "Чет", + "Пет", + "Саб", + "Нед", + ] + + +class GermanBaseLocale(Locale): + + past = "vor {0}" + future = "in {0}" + and_word = "und" + + timeframes = { + "now": "gerade eben", + "second": "eine Sekunde", + "seconds": "{0} Sekunden", + "minute": "einer Minute", + "minutes": "{0} Minuten", + "hour": "einer Stunde", + "hours": "{0} Stunden", + "day": "einem Tag", + "days": "{0} Tagen", + "week": "einer Woche", + "weeks": "{0} Wochen", + "month": "einem Monat", + "months": "{0} Monaten", + "year": "einem 
Jahr", + "years": "{0} Jahren", + } + + timeframes_only_distance = timeframes.copy() + timeframes_only_distance["minute"] = "eine Minute" + timeframes_only_distance["hour"] = "eine Stunde" + timeframes_only_distance["day"] = "ein Tag" + timeframes_only_distance["week"] = "eine Woche" + timeframes_only_distance["month"] = "ein Monat" + timeframes_only_distance["year"] = "ein Jahr" + + month_names = [ + "", + "Januar", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Montag", + "Dienstag", + "Mittwoch", + "Donnerstag", + "Freitag", + "Samstag", + "Sonntag", + ] + + day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] + + def _ordinal_number(self, n): + return "{}.".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + if not only_distance: + return super(GermanBaseLocale, self).describe( + timeframe, delta, only_distance + ) + + # German uses a different case without 'in' or 'ago' + humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta))) + + return humanized + + +class GermanLocale(GermanBaseLocale, Locale): + + names = ["de", "de_de"] + + +class SwissLocale(GermanBaseLocale, Locale): + + names = ["de_ch"] + + +class AustrianLocale(GermanBaseLocale, Locale): + + names = ["de_at"] + + month_names = [ + "", + "Jänner", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + +class NorwegianLocale(Locale): + + names = ["nb", "nb_no"] + + past = "for {0} siden" + future = "om {0}" + + timeframes = { + "now": "nå nettopp", + "second": "et sekund", + "seconds": "{0} noen sekunder", + "minute": "ett minutt", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dager", + "month": "en måned", + "months": "{0} måneder", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] + + +class NewNorwegianLocale(Locale): + + names = ["nn", "nn_no"] + + past = "for {0} sidan" + future = "om {0}" + + timeframes = { + "now": "no nettopp", + "second": "et sekund", + "seconds": "{0} nokre sekund", + "minute": "ett minutt", + "minutes": "{0} minutt", + "hour": 
"ein time", + "hours": "{0} timar", + "day": "ein dag", + "days": "{0} dagar", + "month": "en månad", + "months": "{0} månader", + "year": "eit år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "måndag", + "tysdag", + "onsdag", + "torsdag", + "fredag", + "laurdag", + "sundag", + ] + day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] + + +class PortugueseLocale(Locale): + names = ["pt", "pt_pt"] + + past = "há {0}" + future = "em {0}" + and_word = "e" + + timeframes = { + "now": "agora", + "second": "um segundo", + "seconds": "{0} segundos", + "minute": "um minuto", + "minutes": "{0} minutos", + "hour": "uma hora", + "hours": "{0} horas", + "day": "um dia", + "days": "{0} dias", + "week": "uma semana", + "weeks": "{0} semanas", + "month": "um mês", + "months": "{0} meses", + "year": "um ano", + "years": "{0} anos", + } + + month_names = [ + "", + "Janeiro", + "Fevereiro", + "Março", + "Abril", + "Maio", + "Junho", + "Julho", + "Agosto", + "Setembro", + "Outubro", + "Novembro", + "Dezembro", + ] + month_abbreviations = [ + "", + "Jan", + "Fev", + "Mar", + "Abr", + "Mai", + "Jun", + "Jul", + "Ago", + "Set", + "Out", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Segunda-feira", + "Terça-feira", + "Quarta-feira", + "Quinta-feira", + "Sexta-feira", + "Sábado", + "Domingo", + ] + day_abbreviations = ["", "Seg", "Ter", "Qua", "Qui", "Sex", "Sab", "Dom"] + + +class BrazilianPortugueseLocale(PortugueseLocale): + names = ["pt_br"] + + past = "faz {0}" + + +class TagalogLocale(Locale): + + names = ["tl", "tl_ph"] + + past = "nakaraang {0}" + future = "{0} mula ngayon" + + timeframes = { + "now": "ngayon lang", + "second": "isang segundo", + 
"seconds": "{0} segundo", + "minute": "isang minuto", + "minutes": "{0} minuto", + "hour": "isang oras", + "hours": "{0} oras", + "day": "isang araw", + "days": "{0} araw", + "week": "isang linggo", + "weeks": "{0} linggo", + "month": "isang buwan", + "months": "{0} buwan", + "year": "isang taon", + "years": "{0} taon", + } + + month_names = [ + "", + "Enero", + "Pebrero", + "Marso", + "Abril", + "Mayo", + "Hunyo", + "Hulyo", + "Agosto", + "Setyembre", + "Oktubre", + "Nobyembre", + "Disyembre", + ] + month_abbreviations = [ + "", + "Ene", + "Peb", + "Mar", + "Abr", + "May", + "Hun", + "Hul", + "Ago", + "Set", + "Okt", + "Nob", + "Dis", + ] + + day_names = [ + "", + "Lunes", + "Martes", + "Miyerkules", + "Huwebes", + "Biyernes", + "Sabado", + "Linggo", + ] + day_abbreviations = ["", "Lun", "Mar", "Miy", "Huw", "Biy", "Sab", "Lin"] + + meridians = {"am": "nu", "pm": "nh", "AM": "ng umaga", "PM": "ng hapon"} + + def _ordinal_number(self, n): + return "ika-{}".format(n) + + +class VietnameseLocale(Locale): + + names = ["vi", "vi_vn"] + + past = "{0} trước" + future = "{0} nữa" + + timeframes = { + "now": "hiện tại", + "second": "một giây", + "seconds": "{0} giây", + "minute": "một phút", + "minutes": "{0} phút", + "hour": "một giờ", + "hours": "{0} giờ", + "day": "một ngày", + "days": "{0} ngày", + "week": "một tuần", + "weeks": "{0} tuần", + "month": "một tháng", + "months": "{0} tháng", + "year": "một năm", + "years": "{0} năm", + } + + month_names = [ + "", + "Tháng Một", + "Tháng Hai", + "Tháng Ba", + "Tháng Tư", + "Tháng Năm", + "Tháng Sáu", + "Tháng Bảy", + "Tháng Tám", + "Tháng Chín", + "Tháng Mười", + "Tháng Mười Một", + "Tháng Mười Hai", + ] + month_abbreviations = [ + "", + "Tháng 1", + "Tháng 2", + "Tháng 3", + "Tháng 4", + "Tháng 5", + "Tháng 6", + "Tháng 7", + "Tháng 8", + "Tháng 9", + "Tháng 10", + "Tháng 11", + "Tháng 12", + ] + + day_names = [ + "", + "Thứ Hai", + "Thứ Ba", + "Thứ Tư", + "Thứ Năm", + "Thứ Sáu", + "Thứ Bảy", + "Chủ Nhật", + ] + 
day_abbreviations = ["", "Thứ 2", "Thứ 3", "Thứ 4", "Thứ 5", "Thứ 6", "Thứ 7", "CN"] + + +class TurkishLocale(Locale): + + names = ["tr", "tr_tr"] + + past = "{0} önce" + future = "{0} sonra" + + timeframes = { + "now": "şimdi", + "second": "bir saniye", + "seconds": "{0} saniye", + "minute": "bir dakika", + "minutes": "{0} dakika", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "yıl", + "years": "{0} yıl", + } + + month_names = [ + "", + "Ocak", + "Şubat", + "Mart", + "Nisan", + "Mayıs", + "Haziran", + "Temmuz", + "Ağustos", + "Eylül", + "Ekim", + "Kasım", + "Aralık", + ] + month_abbreviations = [ + "", + "Oca", + "Şub", + "Mar", + "Nis", + "May", + "Haz", + "Tem", + "Ağu", + "Eyl", + "Eki", + "Kas", + "Ara", + ] + + day_names = [ + "", + "Pazartesi", + "Salı", + "Çarşamba", + "Perşembe", + "Cuma", + "Cumartesi", + "Pazar", + ] + day_abbreviations = ["", "Pzt", "Sal", "Çar", "Per", "Cum", "Cmt", "Paz"] + + +class AzerbaijaniLocale(Locale): + + names = ["az", "az_az"] + + past = "{0} əvvəl" + future = "{0} sonra" + + timeframes = { + "now": "indi", + "second": "saniyə", + "seconds": "{0} saniyə", + "minute": "bir dəqiqə", + "minutes": "{0} dəqiqə", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "il", + "years": "{0} il", + } + + month_names = [ + "", + "Yanvar", + "Fevral", + "Mart", + "Aprel", + "May", + "İyun", + "İyul", + "Avqust", + "Sentyabr", + "Oktyabr", + "Noyabr", + "Dekabr", + ] + month_abbreviations = [ + "", + "Yan", + "Fev", + "Mar", + "Apr", + "May", + "İyn", + "İyl", + "Avq", + "Sen", + "Okt", + "Noy", + "Dek", + ] + + day_names = [ + "", + "Bazar ertəsi", + "Çərşənbə axşamı", + "Çərşənbə", + "Cümə axşamı", + "Cümə", + "Şənbə", + "Bazar", + ] + day_abbreviations = ["", "Ber", "Çax", "Çər", "Cax", "Cüm", "Şnb", "Bzr"] + + +class ArabicLocale(Locale): + names = [ + "ar", 
+ "ar_ae", + "ar_bh", + "ar_dj", + "ar_eg", + "ar_eh", + "ar_er", + "ar_km", + "ar_kw", + "ar_ly", + "ar_om", + "ar_qa", + "ar_sa", + "ar_sd", + "ar_so", + "ar_ss", + "ar_td", + "ar_ye", + ] + + past = "منذ {0}" + future = "خلال {0}" + + timeframes = { + "now": "الآن", + "second": "ثانية", + "seconds": {"double": "ثانيتين", "ten": "{0} ثوان", "higher": "{0} ثانية"}, + "minute": "دقيقة", + "minutes": {"double": "دقيقتين", "ten": "{0} دقائق", "higher": "{0} دقيقة"}, + "hour": "ساعة", + "hours": {"double": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"}, + "day": "يوم", + "days": {"double": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"}, + "month": "شهر", + "months": {"double": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"}, + "year": "سنة", + "years": {"double": "سنتين", "ten": "{0} سنوات", "higher": "{0} سنة"}, + } + + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + day_names = [ + "", + "الإثنين", + "الثلاثاء", + "الأربعاء", + "الخميس", + "الجمعة", + "السبت", + "الأحد", + ] + day_abbreviations = ["", "إثنين", "ثلاثاء", "أربعاء", "خميس", "جمعة", "سبت", "أحد"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + delta = abs(delta) + if isinstance(form, dict): + if delta == 2: + form = form["double"] + elif delta > 2 and delta <= 10: + form = form["ten"] + else: + form = form["higher"] + + return form.format(delta) + + +class LevantArabicLocale(ArabicLocale): + names = ["ar_iq", "ar_jo", "ar_lb", "ar_ps", "ar_sy"] + month_names = [ + "", + "كانون الثاني", + "شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + month_abbreviations = [ + "", + "كانون الثاني", + 
"شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + + +class AlgeriaTunisiaArabicLocale(ArabicLocale): + names = ["ar_tn", "ar_dz"] + month_names = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + +class MauritaniaArabicLocale(ArabicLocale): + names = ["ar_mr"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + + +class MoroccoArabicLocale(ArabicLocale): + names = ["ar_ma"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + + +class IcelandicLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + timeframe = self.timeframes[timeframe] + if delta < 0: + timeframe = timeframe[0] + elif delta > 0: + timeframe = timeframe[1] + + return timeframe.format(abs(delta)) + + names = ["is", "is_is"] + + past = "fyrir {0} síðan" + future = "eftir {0}" + + timeframes = { + "now": "rétt í þessu", + "second": ("sekúndu", "sekúndu"), + "seconds": ("{0} nokkrum sekúndum", "nokkrar sekúndur"), + "minute": ("einni mínútu", "eina mínútu"), + "minutes": ("{0} mínútum", "{0} mínútur"), + "hour": ("einum tíma", "einn tíma"), + "hours": ("{0} tímum", "{0} tíma"), + "day": ("einum degi", 
"einn dag"), + "days": ("{0} dögum", "{0} daga"), + "month": ("einum mánuði", "einn mánuð"), + "months": ("{0} mánuðum", "{0} mánuði"), + "year": ("einu ári", "eitt ár"), + "years": ("{0} árum", "{0} ár"), + } + + meridians = {"am": "f.h.", "pm": "e.h.", "AM": "f.h.", "PM": "e.h."} + + month_names = [ + "", + "janúar", + "febrúar", + "mars", + "apríl", + "maí", + "júní", + "júlí", + "ágúst", + "september", + "október", + "nóvember", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maí", + "jún", + "júl", + "ágú", + "sep", + "okt", + "nóv", + "des", + ] + + day_names = [ + "", + "mánudagur", + "þriðjudagur", + "miðvikudagur", + "fimmtudagur", + "föstudagur", + "laugardagur", + "sunnudagur", + ] + day_abbreviations = ["", "mán", "þri", "mið", "fim", "fös", "lau", "sun"] + + +class DanishLocale(Locale): + + names = ["da", "da_dk"] + + past = "for {0} siden" + future = "efter {0}" + and_word = "og" + + timeframes = { + "now": "lige nu", + "second": "et sekund", + "seconds": "{0} et par sekunder", + "minute": "et minut", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dage", + "month": "en måned", + "months": "{0} måneder", + "year": "et år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "marts", + "april", + "maj", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "man", "tir", "ons", "tor", "fre", "lør", "søn"] + + +class MalayalamLocale(Locale): + + names = ["ml"] + + past = "{0} മുമ്പ്" + future = "{0} ശേഷം" + + timeframes = { + "now": "ഇപ്പോൾ", + "second": "ഒരു നിമിഷം", + "seconds": "{0} സെക്കന്റ്‌", + "minute": 
"ഒരു മിനിറ്റ്", + "minutes": "{0} മിനിറ്റ്", + "hour": "ഒരു മണിക്കൂർ", + "hours": "{0} മണിക്കൂർ", + "day": "ഒരു ദിവസം ", + "days": "{0} ദിവസം ", + "month": "ഒരു മാസം ", + "months": "{0} മാസം ", + "year": "ഒരു വർഷം ", + "years": "{0} വർഷം ", + } + + meridians = { + "am": "രാവിലെ", + "pm": "ഉച്ചക്ക് ശേഷം", + "AM": "രാവിലെ", + "PM": "ഉച്ചക്ക് ശേഷം", + } + + month_names = [ + "", + "ജനുവരി", + "ഫെബ്രുവരി", + "മാർച്ച്‌", + "ഏപ്രിൽ ", + "മെയ്‌ ", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ്റ്‌", + "സെപ്റ്റംബർ", + "ഒക്ടോബർ", + "നവംബർ", + "ഡിസംബർ", + ] + month_abbreviations = [ + "", + "ജനു", + "ഫെബ് ", + "മാർ", + "ഏപ്രിൽ", + "മേയ്", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ", + "സെപ്റ്റ", + "ഒക്ടോ", + "നവം", + "ഡിസം", + ] + + day_names = ["", "തിങ്കള്‍", "ചൊവ്വ", "ബുധന്‍", "വ്യാഴം", "വെള്ളി", "ശനി", "ഞായര്‍"] + day_abbreviations = [ + "", + "തിങ്കള്‍", + "ചൊവ്വ", + "ബുധന്‍", + "വ്യാഴം", + "വെള്ളി", + "ശനി", + "ഞായര്‍", + ] + + +class HindiLocale(Locale): + + names = ["hi"] + + past = "{0} पहले" + future = "{0} बाद" + + timeframes = { + "now": "अभी", + "second": "एक पल", + "seconds": "{0} सेकंड्", + "minute": "एक मिनट ", + "minutes": "{0} मिनट ", + "hour": "एक घंटा", + "hours": "{0} घंटे", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक माह ", + "months": "{0} महीने ", + "year": "एक वर्ष ", + "years": "{0} साल ", + } + + meridians = {"am": "सुबह", "pm": "शाम", "AM": "सुबह", "PM": "शाम"} + + month_names = [ + "", + "जनवरी", + "फरवरी", + "मार्च", + "अप्रैल ", + "मई", + "जून", + "जुलाई", + "अगस्त", + "सितंबर", + "अक्टूबर", + "नवंबर", + "दिसंबर", + ] + month_abbreviations = [ + "", + "जन", + "फ़र", + "मार्च", + "अप्रै", + "मई", + "जून", + "जुलाई", + "आग", + "सित", + "अकत", + "नवे", + "दिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगल", "बुध", "गुरुवार", "शुक्र", "शनि", "रवि"] + + +class CzechLocale(Locale): + names = ["cs", "cs_cz"] + + timeframes = { + "now": "Teď", + "second": 
{"past": "vteřina", "future": "vteřina", "zero": "vteřina"}, + "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekund"]}, + "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, + "minutes": {"past": "{0} minutami", "future": ["{0} minuty", "{0} minut"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodin"]}, + "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, + "days": {"past": "{0} dny", "future": ["{0} dny", "{0} dnů"]}, + "week": {"past": "týdnem", "future": "týden", "zero": "{0} týdnů"}, + "weeks": {"past": "{0} týdny", "future": ["{0} týdny", "{0} týdnů"]}, + "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, + "months": {"past": "{0} měsíci", "future": ["{0} měsíce", "{0} měsíců"]}, + "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, + "years": {"past": "{0} lety", "future": ["{0} roky", "{0} let"]}, + } + + past = "Před {0}" + future = "Za {0}" + + month_names = [ + "", + "leden", + "únor", + "březen", + "duben", + "květen", + "červen", + "červenec", + "srpen", + "září", + "říjen", + "listopad", + "prosinec", + ] + month_abbreviations = [ + "", + "led", + "úno", + "bře", + "dub", + "kvě", + "čvn", + "čvc", + "srp", + "zář", + "říj", + "lis", + "pro", + ] + + day_names = [ + "", + "pondělí", + "úterý", + "středa", + "čtvrtek", + "pátek", + "sobota", + "neděle", + ] + day_abbreviations = ["", "po", "út", "st", "čt", "pá", "so", "ne"] + + def _format_timeframe(self, timeframe, delta): + """Czech aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! 
+ elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class SlovakLocale(Locale): + names = ["sk", "sk_sk"] + + timeframes = { + "now": "Teraz", + "second": {"past": "sekundou", "future": "sekundu", "zero": "{0} sekúnd"}, + "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekúnd"]}, + "minute": {"past": "minútou", "future": "minútu", "zero": "{0} minút"}, + "minutes": {"past": "{0} minútami", "future": ["{0} minúty", "{0} minút"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodín"]}, + "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, + "days": {"past": "{0} dňami", "future": ["{0} dni", "{0} dní"]}, + "week": {"past": "týždňom", "future": "týždeň", "zero": "{0} týždňov"}, + "weeks": {"past": "{0} týždňami", "future": ["{0} týždne", "{0} týždňov"]}, + "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, + "months": {"past": "{0} mesiacmi", "future": ["{0} mesiace", "{0} mesiacov"]}, + "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, + "years": {"past": "{0} rokmi", "future": ["{0} roky", "{0} rokov"]}, + } + + past = "Pred {0}" + future = "O {0}" + and_word = "a" + + month_names = [ + "", + "január", + "február", + "marec", + "apríl", + "máj", + "jún", + "júl", + "august", + "september", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "máj", + "jún", + "júl", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "pondelok", + "utorok", + "streda", + "štvrtok", + "piatok", + "sobota", + "nedeľa", + ] + day_abbreviations = ["", "po", "ut", "st", "št", "pi", "so", "ne"] + + def _format_timeframe(self, 
timeframe, delta): + """Slovak aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! + elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class FarsiLocale(Locale): + + names = ["fa", "fa_ir"] + + past = "{0} قبل" + future = "در {0}" + + timeframes = { + "now": "اکنون", + "second": "یک لحظه", + "seconds": "{0} ثانیه", + "minute": "یک دقیقه", + "minutes": "{0} دقیقه", + "hour": "یک ساعت", + "hours": "{0} ساعت", + "day": "یک روز", + "days": "{0} روز", + "month": "یک ماه", + "months": "{0} ماه", + "year": "یک سال", + "years": "{0} سال", + } + + meridians = { + "am": "قبل از ظهر", + "pm": "بعد از ظهر", + "AM": "قبل از ظهر", + "PM": "بعد از ظهر", + } + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "دو شنبه", + "سه شنبه", + "چهارشنبه", + "پنجشنبه", + "جمعه", + "شنبه", + "یکشنبه", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + +class HebrewLocale(Locale): + + names = ["he", "he_IL"] + + past = "לפני {0}" + future = "בעוד {0}" + and_word = "ו" + + timeframes = { + "now": "הרגע", + "second": "שנייה", + "seconds": "{0} שניות", + "minute": "דקה", + "minutes": "{0} דקות", + "hour": "שעה", + "hours": "{0} שעות", + "2-hours": "שעתיים", + "day": "יום", + "days": "{0} ימים", + "2-days": "יומיים", + "week": "שבוע", + "weeks": "{0} שבועות", + 
"2-weeks": "שבועיים", + "month": "חודש", + "months": "{0} חודשים", + "2-months": "חודשיים", + "year": "שנה", + "years": "{0} שנים", + "2-years": "שנתיים", + } + + meridians = { + "am": 'לפנ"צ', + "pm": 'אחר"צ', + "AM": "לפני הצהריים", + "PM": "אחרי הצהריים", + } + + month_names = [ + "", + "ינואר", + "פברואר", + "מרץ", + "אפריל", + "מאי", + "יוני", + "יולי", + "אוגוסט", + "ספטמבר", + "אוקטובר", + "נובמבר", + "דצמבר", + ] + month_abbreviations = [ + "", + "ינו׳", + "פבר׳", + "מרץ", + "אפר׳", + "מאי", + "יוני", + "יולי", + "אוג׳", + "ספט׳", + "אוק׳", + "נוב׳", + "דצמ׳", + ] + + day_names = ["", "שני", "שלישי", "רביעי", "חמישי", "שישי", "שבת", "ראשון"] + day_abbreviations = ["", "ב׳", "ג׳", "ד׳", "ה׳", "ו׳", "ש׳", "א׳"] + + def _format_timeframe(self, timeframe, delta): + """Hebrew couple of aware""" + couple = "2-{}".format(timeframe) + single = timeframe.rstrip("s") + if abs(delta) == 2 and couple in self.timeframes: + key = couple + elif abs(delta) == 1 and single in self.timeframes: + key = single + else: + key = timeframe + + return self.timeframes[key].format(trunc(abs(delta))) + + def describe_multi(self, timeframes, only_distance=False): + """Describes a delta within multiple timeframes in plain language. + In Hebrew, the and word behaves a bit differently. + + :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
+ :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords + """ + + humanized = "" + for index, (timeframe, delta) in enumerate(timeframes): + last_humanized = self._format_timeframe(timeframe, delta) + if index == 0: + humanized = last_humanized + elif index == len(timeframes) - 1: # Must have at least 2 items + humanized += " " + self.and_word + if last_humanized[0].isdecimal(): + humanized += "־" + humanized += last_humanized + else: # Don't add for the last one + humanized += ", " + last_humanized + + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + +class MarathiLocale(Locale): + + names = ["mr"] + + past = "{0} आधी" + future = "{0} नंतर" + + timeframes = { + "now": "सद्य", + "second": "एक सेकंद", + "seconds": "{0} सेकंद", + "minute": "एक मिनिट ", + "minutes": "{0} मिनिट ", + "hour": "एक तास", + "hours": "{0} तास", + "day": "एक दिवस", + "days": "{0} दिवस", + "month": "एक महिना ", + "months": "{0} महिने ", + "year": "एक वर्ष ", + "years": "{0} वर्ष ", + } + + meridians = {"am": "सकाळ", "pm": "संध्याकाळ", "AM": "सकाळ", "PM": "संध्याकाळ"} + + month_names = [ + "", + "जानेवारी", + "फेब्रुवारी", + "मार्च", + "एप्रिल", + "मे", + "जून", + "जुलै", + "अॉगस्ट", + "सप्टेंबर", + "अॉक्टोबर", + "नोव्हेंबर", + "डिसेंबर", + ] + month_abbreviations = [ + "", + "जान", + "फेब्रु", + "मार्च", + "एप्रि", + "मे", + "जून", + "जुलै", + "अॉग", + "सप्टें", + "अॉक्टो", + "नोव्हें", + "डिसें", + ] + + day_names = [ + "", + "सोमवार", + "मंगळवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगळ", "बुध", "गुरु", "शुक्र", "शनि", "रवि"] + + +def _map_locales(): + + locales = {} + + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if issubclass(cls, Locale): # pragma: no branch + for name in cls.names: + locales[name.lower()] = cls + + return locales + + +class CatalanLocale(Locale): + names = ["ca", 
"ca_es", "ca_ad", "ca_fr", "ca_it"] + past = "Fa {0}" + future = "En {0}" + and_word = "i" + + timeframes = { + "now": "Ara mateix", + "second": "un segon", + "seconds": "{0} segons", + "minute": "1 minut", + "minutes": "{0} minuts", + "hour": "una hora", + "hours": "{0} hores", + "day": "un dia", + "days": "{0} dies", + "month": "un mes", + "months": "{0} mesos", + "year": "un any", + "years": "{0} anys", + } + + month_names = [ + "", + "gener", + "febrer", + "març", + "abril", + "maig", + "juny", + "juliol", + "agost", + "setembre", + "octubre", + "novembre", + "desembre", + ] + month_abbreviations = [ + "", + "gen.", + "febr.", + "març", + "abr.", + "maig", + "juny", + "jul.", + "ag.", + "set.", + "oct.", + "nov.", + "des.", + ] + day_names = [ + "", + "dilluns", + "dimarts", + "dimecres", + "dijous", + "divendres", + "dissabte", + "diumenge", + ] + day_abbreviations = [ + "", + "dl.", + "dt.", + "dc.", + "dj.", + "dv.", + "ds.", + "dg.", + ] + + +class BasqueLocale(Locale): + names = ["eu", "eu_eu"] + past = "duela {0}" + future = "{0}" # I don't know what's the right phrase in Basque for the future. 
+ + timeframes = { + "now": "Orain", + "second": "segundo bat", + "seconds": "{0} segundu", + "minute": "minutu bat", + "minutes": "{0} minutu", + "hour": "ordu bat", + "hours": "{0} ordu", + "day": "egun bat", + "days": "{0} egun", + "month": "hilabete bat", + "months": "{0} hilabet", + "year": "urte bat", + "years": "{0} urte", + } + + month_names = [ + "", + "urtarrilak", + "otsailak", + "martxoak", + "apirilak", + "maiatzak", + "ekainak", + "uztailak", + "abuztuak", + "irailak", + "urriak", + "azaroak", + "abenduak", + ] + month_abbreviations = [ + "", + "urt", + "ots", + "mar", + "api", + "mai", + "eka", + "uzt", + "abu", + "ira", + "urr", + "aza", + "abe", + ] + day_names = [ + "", + "astelehena", + "asteartea", + "asteazkena", + "osteguna", + "ostirala", + "larunbata", + "igandea", + ] + day_abbreviations = ["", "al", "ar", "az", "og", "ol", "lr", "ig"] + + +class HungarianLocale(Locale): + + names = ["hu", "hu_hu"] + + past = "{0} ezelőtt" + future = "{0} múlva" + + timeframes = { + "now": "éppen most", + "second": {"past": "egy második", "future": "egy második"}, + "seconds": {"past": "{0} másodpercekkel", "future": "{0} pár másodperc"}, + "minute": {"past": "egy perccel", "future": "egy perc"}, + "minutes": {"past": "{0} perccel", "future": "{0} perc"}, + "hour": {"past": "egy órával", "future": "egy óra"}, + "hours": {"past": "{0} órával", "future": "{0} óra"}, + "day": {"past": "egy nappal", "future": "egy nap"}, + "days": {"past": "{0} nappal", "future": "{0} nap"}, + "month": {"past": "egy hónappal", "future": "egy hónap"}, + "months": {"past": "{0} hónappal", "future": "{0} hónap"}, + "year": {"past": "egy évvel", "future": "egy év"}, + "years": {"past": "{0} évvel", "future": "{0} év"}, + } + + month_names = [ + "", + "január", + "február", + "március", + "április", + "május", + "június", + "július", + "augusztus", + "szeptember", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "febr", + "márc", + "ápr", + 
"máj", + "jún", + "júl", + "aug", + "szept", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "hétfő", + "kedd", + "szerda", + "csütörtök", + "péntek", + "szombat", + "vasárnap", + ] + day_abbreviations = ["", "hét", "kedd", "szer", "csüt", "pént", "szom", "vas"] + + meridians = {"am": "de", "pm": "du", "AM": "DE", "PM": "DU"} + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + + if isinstance(form, dict): + if delta > 0: + form = form["future"] + else: + form = form["past"] + + return form.format(abs(delta)) + + +class EsperantoLocale(Locale): + names = ["eo", "eo_xx"] + past = "antaŭ {0}" + future = "post {0}" + + timeframes = { + "now": "nun", + "second": "sekundo", + "seconds": "{0} kelkaj sekundoj", + "minute": "unu minuto", + "minutes": "{0} minutoj", + "hour": "un horo", + "hours": "{0} horoj", + "day": "unu tago", + "days": "{0} tagoj", + "month": "unu monato", + "months": "{0} monatoj", + "year": "unu jaro", + "years": "{0} jaroj", + } + + month_names = [ + "", + "januaro", + "februaro", + "marto", + "aprilo", + "majo", + "junio", + "julio", + "aŭgusto", + "septembro", + "oktobro", + "novembro", + "decembro", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aŭg", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "lundo", + "mardo", + "merkredo", + "ĵaŭdo", + "vendredo", + "sabato", + "dimanĉo", + ] + day_abbreviations = ["", "lun", "mar", "mer", "ĵaŭ", "ven", "sab", "dim"] + + meridians = {"am": "atm", "pm": "ptm", "AM": "ATM", "PM": "PTM"} + + ordinal_day_re = r"((?P[1-3]?[0-9](?=a))a)" + + def _ordinal_number(self, n): + return "{}a".format(n) + + +class ThaiLocale(Locale): + + names = ["th", "th_th"] + + past = "{0}{1}ที่ผ่านมา" + future = "ในอีก{1}{0}" + + timeframes = { + "now": "ขณะนี้", + "second": "วินาที", + "seconds": "{0} ไม่กี่วินาที", + "minute": "1 นาที", + "minutes": "{0} นาที", + "hour": "1 ชั่วโมง", + "hours": "{0} ชั่วโมง", + 
"day": "1 วัน", + "days": "{0} วัน", + "month": "1 เดือน", + "months": "{0} เดือน", + "year": "1 ปี", + "years": "{0} ปี", + } + + month_names = [ + "", + "มกราคม", + "กุมภาพันธ์", + "มีนาคม", + "เมษายน", + "พฤษภาคม", + "มิถุนายน", + "กรกฎาคม", + "สิงหาคม", + "กันยายน", + "ตุลาคม", + "พฤศจิกายน", + "ธันวาคม", + ] + month_abbreviations = [ + "", + "ม.ค.", + "ก.พ.", + "มี.ค.", + "เม.ย.", + "พ.ค.", + "มิ.ย.", + "ก.ค.", + "ส.ค.", + "ก.ย.", + "ต.ค.", + "พ.ย.", + "ธ.ค.", + ] + + day_names = ["", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์", "อาทิตย์"] + day_abbreviations = ["", "จ", "อ", "พ", "พฤ", "ศ", "ส", "อา"] + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + BE_OFFSET = 543 + + def year_full(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year)[2:] + + def _format_relative(self, humanized, timeframe, delta): + """Thai normally doesn't have any space between words""" + if timeframe == "now": + return humanized + space = "" if timeframe == "seconds" else " " + direction = self.past if delta < 0 else self.future + + return direction.format(humanized, space) + + +class BengaliLocale(Locale): + + names = ["bn", "bn_bd", "bn_in"] + + past = "{0} আগে" + future = "{0} পরে" + + timeframes = { + "now": "এখন", + "second": "একটি দ্বিতীয়", + "seconds": "{0} সেকেন্ড", + "minute": "এক মিনিট", + "minutes": "{0} মিনিট", + "hour": "এক ঘণ্টা", + "hours": "{0} ঘণ্টা", + "day": "এক দিন", + "days": "{0} দিন", + "month": "এক মাস", + "months": "{0} মাস ", + "year": "এক বছর", + "years": "{0} বছর", + } + + meridians = {"am": "সকাল", "pm": "বিকাল", "AM": "সকাল", "PM": "বিকাল"} + + month_names = [ + "", + "জানুয়ারি", + "ফেব্রুয়ারি", + "মার্চ", + "এপ্রিল", + "মে", + "জুন", + "জুলাই", + "আগস্ট", + "সেপ্টেম্বর", + "অক্টোবর", + "নভেম্বর", 
+ "ডিসেম্বর", + ] + month_abbreviations = [ + "", + "জানু", + "ফেব", + "মার্চ", + "এপ্রি", + "মে", + "জুন", + "জুল", + "অগা", + "সেপ্ট", + "অক্টো", + "নভে", + "ডিসে", + ] + + day_names = [ + "", + "সোমবার", + "মঙ্গলবার", + "বুধবার", + "বৃহস্পতিবার", + "শুক্রবার", + "শনিবার", + "রবিবার", + ] + day_abbreviations = ["", "সোম", "মঙ্গল", "বুধ", "বৃহঃ", "শুক্র", "শনি", "রবি"] + + def _ordinal_number(self, n): + if n > 10 or n == 0: + return "{}তম".format(n) + if n in [1, 5, 7, 8, 9, 10]: + return "{}ম".format(n) + if n in [2, 3]: + return "{}য়".format(n) + if n == 4: + return "{}র্থ".format(n) + if n == 6: + return "{}ষ্ঠ".format(n) + + +class RomanshLocale(Locale): + + names = ["rm", "rm_ch"] + + past = "avant {0}" + future = "en {0}" + + timeframes = { + "now": "en quest mument", + "second": "in secunda", + "seconds": "{0} secundas", + "minute": "ina minuta", + "minutes": "{0} minutas", + "hour": "in'ura", + "hours": "{0} ura", + "day": "in di", + "days": "{0} dis", + "month": "in mais", + "months": "{0} mais", + "year": "in onn", + "years": "{0} onns", + } + + month_names = [ + "", + "schaner", + "favrer", + "mars", + "avrigl", + "matg", + "zercladur", + "fanadur", + "avust", + "settember", + "october", + "november", + "december", + ] + + month_abbreviations = [ + "", + "schan", + "fav", + "mars", + "avr", + "matg", + "zer", + "fan", + "avu", + "set", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "glindesdi", + "mardi", + "mesemna", + "gievgia", + "venderdi", + "sonda", + "dumengia", + ] + + day_abbreviations = ["", "gli", "ma", "me", "gie", "ve", "so", "du"] + + +class RomanianLocale(Locale): + names = ["ro", "ro_ro"] + + past = "{0} în urmă" + future = "peste {0}" + and_word = "și" + + timeframes = { + "now": "acum", + "second": "o secunda", + "seconds": "{0} câteva secunde", + "minute": "un minut", + "minutes": "{0} minute", + "hour": "o oră", + "hours": "{0} ore", + "day": "o zi", + "days": "{0} zile", + "month": "o lună", + "months": "{0} luni", + 
"year": "un an", + "years": "{0} ani", + } + + month_names = [ + "", + "ianuarie", + "februarie", + "martie", + "aprilie", + "mai", + "iunie", + "iulie", + "august", + "septembrie", + "octombrie", + "noiembrie", + "decembrie", + ] + month_abbreviations = [ + "", + "ian", + "febr", + "mart", + "apr", + "mai", + "iun", + "iul", + "aug", + "sept", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "luni", + "marți", + "miercuri", + "joi", + "vineri", + "sâmbătă", + "duminică", + ] + day_abbreviations = ["", "Lun", "Mar", "Mie", "Joi", "Vin", "Sâm", "Dum"] + + +class SlovenianLocale(Locale): + names = ["sl", "sl_si"] + + past = "pred {0}" + future = "čez {0}" + and_word = "in" + + timeframes = { + "now": "zdaj", + "second": "sekundo", + "seconds": "{0} sekund", + "minute": "minuta", + "minutes": "{0} minutami", + "hour": "uro", + "hours": "{0} ur", + "day": "dan", + "days": "{0} dni", + "month": "mesec", + "months": "{0} mesecev", + "year": "leto", + "years": "{0} let", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + month_names = [ + "", + "Januar", + "Februar", + "Marec", + "April", + "Maj", + "Junij", + "Julij", + "Avgust", + "September", + "Oktober", + "November", + "December", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Maj", + "Jun", + "Jul", + "Avg", + "Sep", + "Okt", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Ponedeljek", + "Torek", + "Sreda", + "Četrtek", + "Petek", + "Sobota", + "Nedelja", + ] + + day_abbreviations = ["", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob", "Ned"] + + +class IndonesianLocale(Locale): + + names = ["id", "id_id"] + + past = "{0} yang lalu" + future = "dalam {0}" + and_word = "dan" + + timeframes = { + "now": "baru saja", + "second": "1 sebentar", + "seconds": "{0} detik", + "minute": "1 menit", + "minutes": "{0} menit", + "hour": "1 jam", + "hours": "{0} jam", + "day": "1 hari", + "days": "{0} hari", + "month": "1 bulan", + "months": "{0} bulan", + "year": "1 tahun", + "years": 
"{0} tahun", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + month_names = [ + "", + "Januari", + "Februari", + "Maret", + "April", + "Mei", + "Juni", + "Juli", + "Agustus", + "September", + "Oktober", + "November", + "Desember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Mei", + "Jun", + "Jul", + "Ags", + "Sept", + "Okt", + "Nov", + "Des", + ] + + day_names = ["", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu", "Minggu"] + + day_abbreviations = [ + "", + "Senin", + "Selasa", + "Rabu", + "Kamis", + "Jumat", + "Sabtu", + "Minggu", + ] + + +class NepaliLocale(Locale): + names = ["ne", "ne_np"] + + past = "{0} पहिले" + future = "{0} पछी" + + timeframes = { + "now": "अहिले", + "second": "एक सेकेन्ड", + "seconds": "{0} सेकण्ड", + "minute": "मिनेट", + "minutes": "{0} मिनेट", + "hour": "एक घण्टा", + "hours": "{0} घण्टा", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक महिना", + "months": "{0} महिना", + "year": "एक बर्ष", + "years": "बर्ष", + } + + meridians = {"am": "पूर्वाह्न", "pm": "अपरान्ह", "AM": "पूर्वाह्न", "PM": "अपरान्ह"} + + month_names = [ + "", + "जनवरी", + "फेब्रुअरी", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अगष्ट", + "सेप्टेम्बर", + "अक्टोबर", + "नोवेम्बर", + "डिसेम्बर", + ] + month_abbreviations = [ + "", + "जन", + "फेब", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अग", + "सेप", + "अक्ट", + "नोव", + "डिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "बिहिवार", + "शुक्रवार", + "शनिवार", + "आइतवार", + ] + + day_abbreviations = ["", "सोम", "मंगल", "बुध", "बिहि", "शुक्र", "शनि", "आइत"] + + +class EstonianLocale(Locale): + names = ["ee", "et"] + + past = "{0} tagasi" + future = "{0} pärast" + and_word = "ja" + + timeframes = { + "now": {"past": "just nüüd", "future": "just nüüd"}, + "second": {"past": "üks sekund", "future": "ühe sekundi"}, + "seconds": {"past": "{0} sekundit", "future": "{0} sekundi"}, + "minute": {"past": "üks minut", "future": "ühe 
minuti"}, + "minutes": {"past": "{0} minutit", "future": "{0} minuti"}, + "hour": {"past": "tund aega", "future": "tunni aja"}, + "hours": {"past": "{0} tundi", "future": "{0} tunni"}, + "day": {"past": "üks päev", "future": "ühe päeva"}, + "days": {"past": "{0} päeva", "future": "{0} päeva"}, + "month": {"past": "üks kuu", "future": "ühe kuu"}, + "months": {"past": "{0} kuud", "future": "{0} kuu"}, + "year": {"past": "üks aasta", "future": "ühe aasta"}, + "years": {"past": "{0} aastat", "future": "{0} aasta"}, + } + + month_names = [ + "", + "Jaanuar", + "Veebruar", + "Märts", + "Aprill", + "Mai", + "Juuni", + "Juuli", + "August", + "September", + "Oktoober", + "November", + "Detsember", + ] + month_abbreviations = [ + "", + "Jan", + "Veb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dets", + ] + + day_names = [ + "", + "Esmaspäev", + "Teisipäev", + "Kolmapäev", + "Neljapäev", + "Reede", + "Laupäev", + "Pühapäev", + ] + day_abbreviations = ["", "Esm", "Teis", "Kolm", "Nelj", "Re", "Lau", "Püh"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + if delta > 0: + form = form["future"] + else: + form = form["past"] + return form.format(abs(delta)) + + +class SwahiliLocale(Locale): + + names = [ + "sw", + "sw_ke", + "sw_tz", + ] + + past = "{0} iliyopita" + future = "muda wa {0}" + and_word = "na" + + timeframes = { + "now": "sasa hivi", + "second": "sekunde", + "seconds": "sekunde {0}", + "minute": "dakika moja", + "minutes": "dakika {0}", + "hour": "saa moja", + "hours": "saa {0}", + "day": "siku moja", + "days": "siku {0}", + "week": "wiki moja", + "weeks": "wiki {0}", + "month": "mwezi moja", + "months": "miezi {0}", + "year": "mwaka moja", + "years": "miaka {0}", + } + + meridians = {"am": "asu", "pm": "mch", "AM": "ASU", "PM": "MCH"} + + month_names = [ + "", + "Januari", + "Februari", + "Machi", + "Aprili", + "Mei", + "Juni", + "Julai", + "Agosti", + "Septemba", + "Oktoba", + 
"Novemba", + "Desemba", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mac", + "Apr", + "Mei", + "Jun", + "Jul", + "Ago", + "Sep", + "Okt", + "Nov", + "Des", + ] + + day_names = [ + "", + "Jumatatu", + "Jumanne", + "Jumatano", + "Alhamisi", + "Ijumaa", + "Jumamosi", + "Jumapili", + ] + day_abbreviations = [ + "", + "Jumatatu", + "Jumanne", + "Jumatano", + "Alhamisi", + "Ijumaa", + "Jumamosi", + "Jumapili", + ] + + +_locales = _map_locales() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py new file mode 100644 index 00000000000..243fd1721c4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import re +from datetime import datetime, timedelta + +from dateutil import tz + +from arrow import locales +from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp + +try: + from functools import lru_cache +except ImportError: # pragma: no cover + from backports.functools_lru_cache import lru_cache # pragma: no cover + + +class ParserError(ValueError): + pass + + +# Allows for ParserErrors to be propagated from _build_datetime() +# when day_of_year errors occur. +# Before this, the ParserErrors were caught by the try/except in +# _parse_multiformat() and the appropriate error message was not +# transmitted to the user. 
+class ParserMatchError(ParserError): + pass + + +class DateTimeParser(object): + + _FORMAT_RE = re.compile( + r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)" + ) + _ESCAPE_RE = re.compile(r"\[[^\[\]]*\]") + + _ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}") + _ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}") + _ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+") + _TWO_DIGIT_RE = re.compile(r"\d{2}") + _THREE_DIGIT_RE = re.compile(r"\d{3}") + _FOUR_DIGIT_RE = re.compile(r"\d{4}") + _TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z") + _TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z") + _TZ_NAME_RE = re.compile(r"\w[\w+\-/]+") + # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will + # break cases like "15 Jul 2000" and a format list (see issue #447) + _TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$") + _TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$") + _TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$") + _WEEK_DATE_RE = re.compile(r"(?P\d{4})[\-]?W(?P\d{2})[\-]?(?P\d)?") + + _BASE_INPUT_RE_MAP = { + "YYYY": _FOUR_DIGIT_RE, + "YY": _TWO_DIGIT_RE, + "MM": _TWO_DIGIT_RE, + "M": _ONE_OR_TWO_DIGIT_RE, + "DDDD": _THREE_DIGIT_RE, + "DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE, + "DD": _TWO_DIGIT_RE, + "D": _ONE_OR_TWO_DIGIT_RE, + "HH": _TWO_DIGIT_RE, + "H": _ONE_OR_TWO_DIGIT_RE, + "hh": _TWO_DIGIT_RE, + "h": _ONE_OR_TWO_DIGIT_RE, + "mm": _TWO_DIGIT_RE, + "m": _ONE_OR_TWO_DIGIT_RE, + "ss": _TWO_DIGIT_RE, + "s": _ONE_OR_TWO_DIGIT_RE, + "X": _TIMESTAMP_RE, + "x": _TIMESTAMP_EXPANDED_RE, + "ZZZ": _TZ_NAME_RE, + "ZZ": _TZ_ZZ_RE, + "Z": _TZ_Z_RE, + "S": _ONE_OR_MORE_DIGIT_RE, + "W": _WEEK_DATE_RE, + } + + SEPARATORS = ["-", "/", "."] + + def __init__(self, locale="en_us", cache_size=0): + + self.locale = locales.get_locale(locale) + self._input_re_map = self._BASE_INPUT_RE_MAP.copy() + self._input_re_map.update( + { + "MMMM": self._generate_choice_re( + 
self.locale.month_names[1:], re.IGNORECASE + ), + "MMM": self._generate_choice_re( + self.locale.month_abbreviations[1:], re.IGNORECASE + ), + "Do": re.compile(self.locale.ordinal_day_re), + "dddd": self._generate_choice_re( + self.locale.day_names[1:], re.IGNORECASE + ), + "ddd": self._generate_choice_re( + self.locale.day_abbreviations[1:], re.IGNORECASE + ), + "d": re.compile(r"[1-7]"), + "a": self._generate_choice_re( + (self.locale.meridians["am"], self.locale.meridians["pm"]) + ), + # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to + # ensure backwards compatibility of this token + "A": self._generate_choice_re(self.locale.meridians.values()), + } + ) + if cache_size > 0: + self._generate_pattern_re = lru_cache(maxsize=cache_size)( + self._generate_pattern_re + ) + + # TODO: since we support more than ISO 8601, we should rename this function + # IDEA: break into multiple functions + def parse_iso(self, datetime_string, normalize_whitespace=False): + + if normalize_whitespace: + datetime_string = re.sub(r"\s+", " ", datetime_string.strip()) + + has_space_divider = " " in datetime_string + has_t_divider = "T" in datetime_string + + num_spaces = datetime_string.count(" ") + if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0: + raise ParserError( + "Expected an ISO 8601-like string, but was given '{}'. 
Try passing in a format string to resolve this.".format( + datetime_string + ) + ) + + has_time = has_space_divider or has_t_divider + has_tz = False + + # date formats (ISO 8601 and others) to test against + # NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used) + formats = [ + "YYYY-MM-DD", + "YYYY-M-DD", + "YYYY-M-D", + "YYYY/MM/DD", + "YYYY/M/DD", + "YYYY/M/D", + "YYYY.MM.DD", + "YYYY.M.DD", + "YYYY.M.D", + "YYYYMMDD", + "YYYY-DDDD", + "YYYYDDDD", + "YYYY-MM", + "YYYY/MM", + "YYYY.MM", + "YYYY", + "W", + ] + + if has_time: + + if has_space_divider: + date_string, time_string = datetime_string.split(" ", 1) + else: + date_string, time_string = datetime_string.split("T", 1) + + time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE) + + time_components = self._TIME_RE.match(time_parts[0]) + + if time_components is None: + raise ParserError( + "Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format." 
+ ) + + ( + hours, + minutes, + seconds, + subseconds_sep, + subseconds, + ) = time_components.groups() + + has_tz = len(time_parts) == 2 + has_minutes = minutes is not None + has_seconds = seconds is not None + has_subseconds = subseconds is not None + + is_basic_time_format = ":" not in time_parts[0] + tz_format = "Z" + + # use 'ZZ' token instead since tz offset is present in non-basic format + if has_tz and ":" in time_parts[1]: + tz_format = "ZZ" + + time_sep = "" if is_basic_time_format else ":" + + if has_subseconds: + time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format( + time_sep=time_sep, subseconds_sep=subseconds_sep + ) + elif has_seconds: + time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep) + elif has_minutes: + time_string = "HH{time_sep}mm".format(time_sep=time_sep) + else: + time_string = "HH" + + if has_space_divider: + formats = ["{} {}".format(f, time_string) for f in formats] + else: + formats = ["{}T{}".format(f, time_string) for f in formats] + + if has_time and has_tz: + # Add "Z" or "ZZ" to the format strings to indicate to + # _parse_token() that a timezone needs to be parsed + formats = ["{}{}".format(f, tz_format) for f in formats] + + return self._parse_multiformat(datetime_string, formats) + + def parse(self, datetime_string, fmt, normalize_whitespace=False): + + if normalize_whitespace: + datetime_string = re.sub(r"\s+", " ", datetime_string) + + if isinstance(fmt, list): + return self._parse_multiformat(datetime_string, fmt) + + fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) + + match = fmt_pattern_re.search(datetime_string) + + if match is None: + raise ParserMatchError( + "Failed to match '{}' when parsing '{}'".format(fmt, datetime_string) + ) + + parts = {} + for token in fmt_tokens: + if token == "Do": + value = match.group("value") + elif token == "W": + value = (match.group("year"), match.group("week"), match.group("day")) + else: + value = match.group(token) + 
self._parse_token(token, value, parts) + + return self._build_datetime(parts) + + def _generate_pattern_re(self, fmt): + + # fmt is a string of tokens like 'YYYY-MM-DD' + # we construct a new string by replacing each + # token by its pattern: + # 'YYYY-MM-DD' -> '(?P\d{4})-(?P\d{2})-(?P
\d{2})' + tokens = [] + offset = 0 + + # Escape all special RegEx chars + escaped_fmt = re.escape(fmt) + + # Extract the bracketed expressions to be reinserted later. + escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt) + + # Any number of S is the same as one. + # TODO: allow users to specify the number of digits to parse + escaped_fmt = re.sub(r"S+", "S", escaped_fmt) + + escaped_data = re.findall(self._ESCAPE_RE, fmt) + + fmt_pattern = escaped_fmt + + for m in self._FORMAT_RE.finditer(escaped_fmt): + token = m.group(0) + try: + input_re = self._input_re_map[token] + except KeyError: + raise ParserError("Unrecognized token '{}'".format(token)) + input_pattern = "(?P<{}>{})".format(token, input_re.pattern) + tokens.append(token) + # a pattern doesn't have the same length as the token + # it replaces! We keep the difference in the offset variable. + # This works because the string is scanned left-to-right and matches + # are returned in the order found by finditer. + fmt_pattern = ( + fmt_pattern[: m.start() + offset] + + input_pattern + + fmt_pattern[m.end() + offset :] + ) + offset += len(input_pattern) - (m.end() - m.start()) + + final_fmt_pattern = "" + split_fmt = fmt_pattern.split(r"\#") + + # Due to the way Python splits, 'split_fmt' will always be longer + for i in range(len(split_fmt)): + final_fmt_pattern += split_fmt[i] + if i < len(escaped_data): + final_fmt_pattern += escaped_data[i][1:-1] + + # Wrap final_fmt_pattern in a custom word boundary to strictly + # match the formatting pattern and filter out date and time formats + # that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah, + # blah1998-09-12blah. The custom word boundary matches every character + # that is not a whitespace character to allow for searching for a date + # and time string in a natural language sentence. Therefore, searching + # for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will + # work properly. 
+ # Certain punctuation before or after the target pattern such as + # "1998-09-12," is permitted. For the full list of valid punctuation, + # see the documentation. + + starting_word_boundary = ( + r"(?<![\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>\s])" # This is the list of punctuation that is ok before the pattern (i.e. "It can't not be these characters before the pattern") + r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers + ) + ending_word_boundary = ( + r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time + r"(?!\S))" # Don't allow any non-whitespace character after the punctuation + ) + bounded_fmt_pattern = r"{}{}{}".format( + starting_word_boundary, final_fmt_pattern, ending_word_boundary + ) + + return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE) + + def _parse_token(self, token, value, parts): + + if token == "YYYY": + parts["year"] = int(value) + + elif token == "YY": + value = int(value) + parts["year"] = 1900 + value if value > 68 else 2000 + value + + elif token in ["MMMM", "MMM"]: + parts["month"] = self.locale.month_number(value.lower()) + + elif token in ["MM", "M"]: + parts["month"] = int(value) + + elif token in ["DDDD", "DDD"]: + parts["day_of_year"] = int(value) + + elif token in ["DD", "D"]: + parts["day"] = int(value) + + elif token == "Do": + parts["day"] = int(value) + + elif token == "dddd": + # locale day names are 1-indexed + day_of_week = [x.lower() for x in self.locale.day_names].index( + value.lower() + ) + parts["day_of_week"] = day_of_week - 1 + + elif token == "ddd": + # locale day abbreviations are 1-indexed + day_of_week = [x.lower() for x in self.locale.day_abbreviations].index( + value.lower() + ) + parts["day_of_week"] = day_of_week - 1 + + elif token.upper() in ["HH", "H"]: + parts["hour"] = int(value) + + elif token in ["mm", "m"]: + 
parts["minute"] = int(value) + + elif token in ["ss", "s"]: + parts["second"] = int(value) + + elif token == "S": + # We have the *most significant* digits of an arbitrary-precision integer. + # We want the six most significant digits as an integer, rounded. + # IDEA: add nanosecond support somehow? Need datetime support for it first. + value = value.ljust(7, str("0")) + + # floating-point (IEEE-754) defaults to half-to-even rounding + seventh_digit = int(value[6]) + if seventh_digit == 5: + rounding = int(value[5]) % 2 + elif seventh_digit > 5: + rounding = 1 + else: + rounding = 0 + + parts["microsecond"] = int(value[:6]) + rounding + + elif token == "X": + parts["timestamp"] = float(value) + + elif token == "x": + parts["expanded_timestamp"] = int(value) + + elif token in ["ZZZ", "ZZ", "Z"]: + parts["tzinfo"] = TzinfoParser.parse(value) + + elif token in ["a", "A"]: + if value in (self.locale.meridians["am"], self.locale.meridians["AM"]): + parts["am_pm"] = "am" + elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]): + parts["am_pm"] = "pm" + + elif token == "W": + parts["weekdate"] = value + + @staticmethod + def _build_datetime(parts): + + weekdate = parts.get("weekdate") + + if weekdate is not None: + # we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that + year, week = int(weekdate[0]), int(weekdate[1]) + + if weekdate[2] is not None: + day = int(weekdate[2]) + else: + # day not given, default to 1 + day = 1 + + dt = iso_to_gregorian(year, week, day) + parts["year"] = dt.year + parts["month"] = dt.month + parts["day"] = dt.day + + timestamp = parts.get("timestamp") + + if timestamp is not None: + return datetime.fromtimestamp(timestamp, tz=tz.tzutc()) + + expanded_timestamp = parts.get("expanded_timestamp") + + if expanded_timestamp is not None: + return datetime.fromtimestamp( + normalize_timestamp(expanded_timestamp), + tz=tz.tzutc(), + ) + + day_of_year = parts.get("day_of_year") + + if 
day_of_year is not None: + year = parts.get("year") + month = parts.get("month") + if year is None: + raise ParserError( + "Year component is required with the DDD and DDDD tokens." + ) + + if month is not None: + raise ParserError( + "Month component is not allowed with the DDD and DDDD tokens." + ) + + date_string = "{}-{}".format(year, day_of_year) + try: + dt = datetime.strptime(date_string, "%Y-%j") + except ValueError: + raise ParserError( + "The provided day of year '{}' is invalid.".format(day_of_year) + ) + + parts["year"] = dt.year + parts["month"] = dt.month + parts["day"] = dt.day + + day_of_week = parts.get("day_of_week") + day = parts.get("day") + + # If day is passed, ignore day of week + if day_of_week is not None and day is None: + year = parts.get("year", 1970) + month = parts.get("month", 1) + day = 1 + + # dddd => first day of week after epoch + # dddd YYYY => first day of week in specified year + # dddd MM YYYY => first day of week in specified year and month + # dddd MM => first day after epoch in specified month + next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week) + parts["year"] = next_weekday_dt.year + parts["month"] = next_weekday_dt.month + parts["day"] = next_weekday_dt.day + + am_pm = parts.get("am_pm") + hour = parts.get("hour", 0) + + if am_pm == "pm" and hour < 12: + hour += 12 + elif am_pm == "am" and hour == 12: + hour = 0 + + # Support for midnight at the end of day + if hour == 24: + if parts.get("minute", 0) != 0: + raise ParserError("Midnight at the end of day must not contain minutes") + if parts.get("second", 0) != 0: + raise ParserError("Midnight at the end of day must not contain seconds") + if parts.get("microsecond", 0) != 0: + raise ParserError( + "Midnight at the end of day must not contain microseconds" + ) + hour = 0 + day_increment = 1 + else: + day_increment = 0 + + # account for rounding up to 1000000 + microsecond = parts.get("microsecond", 0) + if microsecond == 1000000: + microsecond = 0 + 
second_increment = 1 + else: + second_increment = 0 + + increment = timedelta(days=day_increment, seconds=second_increment) + + return ( + datetime( + year=parts.get("year", 1), + month=parts.get("month", 1), + day=parts.get("day", 1), + hour=hour, + minute=parts.get("minute", 0), + second=parts.get("second", 0), + microsecond=microsecond, + tzinfo=parts.get("tzinfo"), + ) + + increment + ) + + def _parse_multiformat(self, string, formats): + + _datetime = None + + for fmt in formats: + try: + _datetime = self.parse(string, fmt) + break + except ParserMatchError: + pass + + if _datetime is None: + raise ParserError( + "Could not match input '{}' to any of the following formats: {}".format( + string, ", ".join(formats) + ) + ) + + return _datetime + + # generates a capture group of choices separated by an OR operator + @staticmethod + def _generate_choice_re(choices, flags=0): + return re.compile(r"({})".format("|".join(choices)), flags=flags) + + +class TzinfoParser(object): + _TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$") + + @classmethod + def parse(cls, tzinfo_string): + + tzinfo = None + + if tzinfo_string == "local": + tzinfo = tz.tzlocal() + + elif tzinfo_string in ["utc", "UTC", "Z"]: + tzinfo = tz.tzutc() + + else: + + iso_match = cls._TZINFO_RE.match(tzinfo_string) + + if iso_match: + sign, hours, minutes = iso_match.groups() + if minutes is None: + minutes = 0 + seconds = int(hours) * 3600 + int(minutes) * 60 + + if sign == "-": + seconds *= -1 + + tzinfo = tz.tzoffset(None, seconds) + + else: + tzinfo = tz.gettz(tzinfo_string) + + if tzinfo is None: + raise ParserError( + 'Could not parse timezone expression "{}"'.format(tzinfo_string) + ) + + return tzinfo diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py new file mode 100644 index 00000000000..acce8878df8 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import datetime +import numbers + +from dateutil.rrule import WEEKLY, rrule + +from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US + + +def next_weekday(start_date, weekday): + """Get next weekday from the specified start date. + + :param start_date: Datetime object representing the start date. + :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday). + :return: Datetime object corresponding to the next weekday after start_date. + + Usage:: + + # Get first Monday after epoch + >>> next_weekday(datetime(1970, 1, 1), 0) + 1970-01-05 00:00:00 + + # Get first Thursday after epoch + >>> next_weekday(datetime(1970, 1, 1), 3) + 1970-01-01 00:00:00 + + # Get first Sunday after epoch + >>> next_weekday(datetime(1970, 1, 1), 6) + 1970-01-04 00:00:00 + """ + if weekday < 0 or weekday > 6: + raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).") + return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0] + + +def total_seconds(td): + """Get total seconds for timedelta.""" + return td.total_seconds() + + +def is_timestamp(value): + """Check if value is a valid timestamp.""" + if isinstance(value, bool): + return False + if not ( + isinstance(value, numbers.Integral) + or isinstance(value, float) + or isinstance(value, str) + ): + return False + try: + float(value) + return True + except ValueError: + return False + + +def normalize_timestamp(timestamp): + """Normalize millisecond and microsecond timestamps into normal timestamps.""" + if timestamp > MAX_TIMESTAMP: + if timestamp < MAX_TIMESTAMP_MS: + timestamp /= 1e3 + elif timestamp < MAX_TIMESTAMP_US: + timestamp /= 1e6 + else: + raise ValueError( + "The specified timestamp '{}' is too large.".format(timestamp) + ) + return timestamp + + +# Credit to 
https://stackoverflow.com/a/1700069 +def iso_to_gregorian(iso_year, iso_week, iso_day): + """Converts an ISO week date tuple into a datetime object.""" + + if not 1 <= iso_week <= 53: + raise ValueError("ISO Calendar week value must be between 1-53.") + + if not 1 <= iso_day <= 7: + raise ValueError("ISO Calendar day value must be between 1-7") + + # The first week of the year always contains 4 Jan. + fourth_jan = datetime.date(iso_year, 1, 4) + delta = datetime.timedelta(fourth_jan.isoweekday() - 1) + year_start = fourth_jan - delta + gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) + + return gregorian + + +def validate_bounds(bounds): + if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]": + raise ValueError( + 'Invalid bounds. Please select between "()", "(]", "[)", or "[]".' + ) + + +# Python 2.7 / 3.0+ definitions for isstr function. + +try: # pragma: no cover + basestring + + def isstr(s): + return isinstance(s, basestring) # noqa: F821 + + +except NameError: # pragma: no cover + + def isstr(s): + return isinstance(s, str) + + +__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile new file mode 100644 index 00000000000..d4bb2cbb9ed --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py new file mode 100644 index 00000000000..aaf3c508223 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- + +# -- Path setup -------------------------------------------------------------- + +import io +import os +import sys + +sys.path.insert(0, os.path.abspath("..")) + +about = {} +with io.open("../arrow/_version.py", "r", encoding="utf-8") as f: + exec(f.read(), about) + +# -- Project information ----------------------------------------------------- + +project = u"Arrow 🏹" +copyright = "2020, Chris Smith" +author = "Chris Smith" + +release = about["__version__"] + +# -- General configuration --------------------------------------------------- + +extensions = ["sphinx.ext.autodoc"] + +templates_path = [] + +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +master_doc = "index" +source_suffix = ".rst" +pygments_style = "sphinx" + +language = None + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "alabaster" +html_theme_path = [] +html_static_path = [] + +html_show_sourcelink = False +html_show_sphinx = False +html_show_copyright = True + +# https://alabaster.readthedocs.io/en/latest/customization.html +html_theme_options = { + "description": "Arrow is a sensible and human-friendly approach to dates, times and timestamps.", + "github_user": "arrow-py", + "github_repo": "arrow", + "github_banner": True, + "show_related": False, + 
"show_powered_by": False, + "github_button": True, + "github_type": "star", + "github_count": "true", # must be a string +} + +html_sidebars = { + "**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"] +} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst new file mode 100644 index 00000000000..e2830b04f30 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst @@ -0,0 +1,566 @@ +Arrow: Better dates & times for Python +====================================== + +Release v\ |release| (`Installation`_) (`Changelog `_) + +.. include:: ../README.rst + :start-after: start-inclusion-marker-do-not-remove + :end-before: end-inclusion-marker-do-not-remove + +User's Guide +------------ + +Creation +~~~~~~~~ + +Get 'now' easily: + +.. code-block:: python + + >>> arrow.utcnow() + + + >>> arrow.now() + + + >>> arrow.now('US/Pacific') + + +Create from timestamps (:code:`int` or :code:`float`): + +.. code-block:: python + + >>> arrow.get(1367900664) + + + >>> arrow.get(1367900664.152325) + + +Use a naive or timezone-aware datetime, or flexibly specify a timezone: + +.. code-block:: python + + >>> arrow.get(datetime.utcnow()) + + + >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') + + + >>> from dateutil import tz + >>> arrow.get(datetime(2013, 5, 5), tz.gettz('US/Pacific')) + + + >>> arrow.get(datetime.now(tz.gettz('US/Pacific'))) + + +Parse from a string: + +.. code-block:: python + + >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss') + + +Search a date in a string: + +.. code-block:: python + + >>> arrow.get('June was born in May 1980', 'MMMM YYYY') + + +Some ISO 8601 compliant strings are recognized and parsed without a format string: + + >>> arrow.get('2013-09-30T15:34:00.000-07:00') + + +Arrow objects can be instantiated directly too, with the same arguments as a datetime: + +.. 
code-block:: python + + >>> arrow.get(2013, 5, 5) + + + >>> arrow.Arrow(2013, 5, 5) + + +Properties +~~~~~~~~~~ + +Get a datetime or timestamp representation: + +.. code-block:: python + + >>> a = arrow.utcnow() + >>> a.datetime + datetime.datetime(2013, 5, 7, 4, 38, 15, 447644, tzinfo=tzutc()) + + >>> a.timestamp + 1367901495 + +Get a naive datetime, and tzinfo: + +.. code-block:: python + + >>> a.naive + datetime.datetime(2013, 5, 7, 4, 38, 15, 447644) + + >>> a.tzinfo + tzutc() + +Get any datetime value: + +.. code-block:: python + + >>> a.year + 2013 + +Call datetime functions that return properties: + +.. code-block:: python + + >>> a.date() + datetime.date(2013, 5, 7) + + >>> a.time() + datetime.time(4, 38, 15, 447644) + +Replace & Shift +~~~~~~~~~~~~~~~ + +Get a new :class:`Arrow ` object, with altered attributes, just as you would with a datetime: + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw + + + >>> arw.replace(hour=4, minute=40) + + +Or, get one with attributes shifted forward or backward: + +.. code-block:: python + + >>> arw.shift(weeks=+3) + + +Even replace the timezone without altering other attributes: + +.. code-block:: python + + >>> arw.replace(tzinfo='US/Pacific') + + +Move between the earlier and later moments of an ambiguous time: + +.. code-block:: python + + >>> paris_transition = arrow.Arrow(2019, 10, 27, 2, tzinfo="Europe/Paris", fold=0) + >>> paris_transition + + >>> paris_transition.ambiguous + True + >>> paris_transition.replace(fold=1) + + +Format +~~~~~~ + +.. code-block:: python + + >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-07 05:23:16 -00:00' + +Convert +~~~~~~~ + +Convert from UTC to other timezones by name or tzinfo: + +.. code-block:: python + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc.to('US/Pacific') + + + >>> utc.to(tz.gettz('US/Pacific')) + + +Or using shorthand: + +.. 
code-block:: python + + >>> utc.to('local') + + + >>> utc.to('local').to('utc') + + + +Humanize +~~~~~~~~ + +Humanize relative to now: + +.. code-block:: python + + >>> past = arrow.utcnow().shift(hours=-1) + >>> past.humanize() + 'an hour ago' + +Or another Arrow, or datetime: + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(hours=2) + >>> future.humanize(present) + 'in 2 hours' + +Indicate time as relative or include only the distance + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(hours=2) + >>> future.humanize(present) + 'in 2 hours' + >>> future.humanize(present, only_distance=True) + '2 hours' + + +Indicate a specific time granularity (or multiple): + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(minutes=66) + >>> future.humanize(present, granularity="minute") + 'in 66 minutes' + >>> future.humanize(present, granularity=["hour", "minute"]) + 'in an hour and 6 minutes' + >>> present.humanize(future, granularity=["hour", "minute"]) + 'an hour and 6 minutes ago' + >>> future.humanize(present, only_distance=True, granularity=["hour", "minute"]) + 'an hour and 6 minutes' + +Support for a growing number of locales (see ``locales.py`` for supported languages): + +.. code-block:: python + + + >>> future = arrow.utcnow().shift(hours=1) + >>> future.humanize(a, locale='ru') + 'через 2 час(а,ов)' + + +Ranges & Spans +~~~~~~~~~~~~~~ + +Get the time span of any unit: + +.. code-block:: python + + >>> arrow.utcnow().span('hour') + (, ) + +Or just get the floor and ceiling: + +.. code-block:: python + + >>> arrow.utcnow().floor('hour') + + + >>> arrow.utcnow().ceil('hour') + + +You can also get a range of time spans: + +.. code-block:: python + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.span_range('hour', start, end): + ... print r + ... 
+ (, ) + (, ) + (, ) + (, ) + (, ) + +Or just iterate over a range of time: + +.. code-block:: python + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print repr(r) + ... + + + + + + +.. toctree:: + :maxdepth: 2 + +Factories +~~~~~~~~~ + +Use factories to harness Arrow's module API for a custom Arrow-derived type. First, derive your type: + +.. code-block:: python + + >>> class CustomArrow(arrow.Arrow): + ... + ... def days_till_xmas(self): + ... + ... xmas = arrow.Arrow(self.year, 12, 25) + ... + ... if self > xmas: + ... xmas = xmas.shift(years=1) + ... + ... return (xmas - self).days + + +Then get and use a factory for it: + +.. code-block:: python + + >>> factory = arrow.ArrowFactory(CustomArrow) + >>> custom = factory.utcnow() + >>> custom + >>> + + >>> custom.days_till_xmas() + >>> 211 + +Supported Tokens +~~~~~~~~~~~~~~~~ + +Use the following tokens for parsing and formatting. Note that they are **not** the same as the tokens for `strptime `_: + ++--------------------------------+--------------+-------------------------------------------+ +| |Token |Output | ++================================+==============+===========================================+ +|**Year** |YYYY |2000, 2001, 2002 ... 2012, 2013 | ++--------------------------------+--------------+-------------------------------------------+ +| |YY |00, 01, 02 ... 12, 13 | ++--------------------------------+--------------+-------------------------------------------+ +|**Month** |MMMM |January, February, March ... [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |MMM |Jan, Feb, Mar ... [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |MM |01, 02, 03 ... 11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +| |M |1, 2, 3 ... 
11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Year** |DDDD |001, 002, 003 ... 364, 365 | ++--------------------------------+--------------+-------------------------------------------+ +| |DDD |1, 2, 3 ... 364, 365 | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Month** |DD |01, 02, 03 ... 30, 31 | ++--------------------------------+--------------+-------------------------------------------+ +| |D |1, 2, 3 ... 30, 31 | ++--------------------------------+--------------+-------------------------------------------+ +| |Do |1st, 2nd, 3rd ... 30th, 31st | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Week** |dddd |Monday, Tuesday, Wednesday ... [#t2]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |ddd |Mon, Tue, Wed ... [#t2]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |d |1, 2, 3 ... 6, 7 | ++--------------------------------+--------------+-------------------------------------------+ +|**ISO week date** |W |2011-W05-4, 2019-W17 | ++--------------------------------+--------------+-------------------------------------------+ +|**Hour** |HH |00, 01, 02 ... 23, 24 | ++--------------------------------+--------------+-------------------------------------------+ +| |H |0, 1, 2 ... 23, 24 | ++--------------------------------+--------------+-------------------------------------------+ +| |hh |01, 02, 03 ... 11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +| |h |1, 2, 3 ... 
11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +|**AM / PM** |A |AM, PM, am, pm [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |a |am, pm [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**Minute** |mm |00, 01, 02 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +| |m |0, 1, 2 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +|**Second** |ss |00, 01, 02 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +| |s |0, 1, 2 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +|**Sub-second** |S... |0, 02, 003, 000006, 123123123123... [#t3]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**Timezone** |ZZZ |Asia/Baku, Europe/Warsaw, GMT ... [#t4]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |ZZ |-07:00, -06:00 ... +06:00, +07:00, +08, Z | ++--------------------------------+--------------+-------------------------------------------+ +| |Z |-0700, -0600 ... +0600, +0700, +08, Z | ++--------------------------------+--------------+-------------------------------------------+ +|**Seconds Timestamp** |X |1381685817, 1381685817.915482 ... [#t5]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**ms or µs Timestamp** |x |1569980330813, 1569980330813221 | ++--------------------------------+--------------+-------------------------------------------+ + +.. rubric:: Footnotes + +.. [#t1] localization support for parsing and formatting +.. [#t2] localization support only for formatting +.. 
[#t3] the result is truncated to microseconds, with `half-to-even rounding `_. +.. [#t4] timezone names from `tz database `_ provided via dateutil package, note that abbreviations such as MST, PDT, BRST are unlikely to parse due to ambiguity. Use the full IANA zone name instead (Asia/Shanghai, Europe/London, America/Chicago etc). +.. [#t5] this token cannot be used for parsing timestamps out of natural language strings due to compatibility reasons + +Built-in Formats +++++++++++++++++ + +There are several formatting standards that are provided as built-in tokens. + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw.format(arrow.FORMAT_ATOM) + '2020-05-27 10:30:35+00:00' + >>> arw.format(arrow.FORMAT_COOKIE) + 'Wednesday, 27-May-2020 10:30:35 UTC' + >>> arw.format(arrow.FORMAT_RSS) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC822) + 'Wed, 27 May 20 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC850) + 'Wednesday, 27-May-20 10:30:35 UTC' + >>> arw.format(arrow.FORMAT_RFC1036) + 'Wed, 27 May 20 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC1123) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC2822) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC3339) + '2020-05-27 10:30:35+00:00' + >>> arw.format(arrow.FORMAT_W3C) + '2020-05-27 10:30:35+00:00' + +Escaping Formats +~~~~~~~~~~~~~~~~ + +Tokens, phrases, and regular expressions in a format string can be escaped when parsing and formatting by enclosing them within square brackets. + +Tokens & Phrases +++++++++++++++++ + +Any `token `_ or phrase can be escaped as follows: + +.. 
code-block:: python + + >>> fmt = "YYYY-MM-DD h [h] m" + >>> arw = arrow.get("2018-03-09 8 h 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 h 40' + + >>> fmt = "YYYY-MM-DD h [hello] m" + >>> arw = arrow.get("2018-03-09 8 hello 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 hello 40' + + >>> fmt = "YYYY-MM-DD h [hello world] m" + >>> arw = arrow.get("2018-03-09 8 hello world 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 hello world 40' + +This can be useful for parsing dates in different locales such as French, in which it is common to format time strings as "8 h 40" rather than "8:40". + +Regular Expressions ++++++++++++++++++++ + +You can also escape regular expressions by enclosing them within square brackets. In the following example, we are using the regular expression :code:`\s+` to match any number of whitespace characters that separate the tokens. This is useful if you do not know the number of spaces between tokens ahead of time (e.g. in log files). + +.. code-block:: python + + >>> fmt = r"ddd[\s+]MMM[\s+]DD[\s+]HH:mm:ss[\s+]YYYY" + >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) + + + >>> arrow.get("Mon \tSep 08 16:41:45 2014", fmt) + + + >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) + + +Punctuation +~~~~~~~~~~~ + +Date and time formats may be fenced on either side by one punctuation character from the following list: ``, . ; : ? ! " \` ' [ ] { } ( ) < >`` + +.. 
code-block:: python + + >>> arrow.get("Cool date: 2019-10-31T09:12:45.123456+04:30.", "YYYY-MM-DDTHH:mm:ss.SZZ") + + + >>> arrow.get("Tomorrow (2019-10-31) is Halloween!", "YYYY-MM-DD") + + + >>> arrow.get("Halloween is on 2019.10.31.", "YYYY.MM.DD") + + + >>> arrow.get("It's Halloween tomorrow (2019-10-31)!", "YYYY-MM-DD") + # Raises exception because there are multiple punctuation marks following the date + +Redundant Whitespace +~~~~~~~~~~~~~~~~~~~~ + +Redundant whitespace characters (spaces, tabs, and newlines) can be normalized automatically by passing in the ``normalize_whitespace`` flag to ``arrow.get``: + +.. code-block:: python + + >>> arrow.get('\t \n 2013-05-05T12:30:45.123456 \t \n', normalize_whitespace=True) + + + >>> arrow.get('2013-05-05 T \n 12:30:45\t123456', 'YYYY-MM-DD T HH:mm:ss S', normalize_whitespace=True) + + +API Guide +--------- + +arrow.arrow +~~~~~~~~~~~ + +.. automodule:: arrow.arrow + :members: + +arrow.factory +~~~~~~~~~~~~~ + +.. automodule:: arrow.factory + :members: + +arrow.api +~~~~~~~~~ + +.. automodule:: arrow.api + :members: + +arrow.locale +~~~~~~~~~~~~ + +.. automodule:: arrow.locales + :members: + :undoc-members: + +Release History +--------------- + +.. toctree:: + :maxdepth: 2 + + releases diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat new file mode 100644 index 00000000000..922152e96a0 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst new file mode 100644 index 00000000000..22e1e59c8c7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst @@ -0,0 +1,3 @@ +.. _releases: + +.. include:: ../CHANGELOG.rst diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt new file mode 100644 index 00000000000..df565d83845 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt @@ -0,0 +1,14 @@ +backports.functools_lru_cache==1.6.1; python_version == "2.7" +dateparser==0.7.* +pre-commit==1.21.*; python_version <= "3.5" +pre-commit==2.6.*; python_version >= "3.6" +pytest==4.6.*; python_version == "2.7" +pytest==6.0.*; python_version >= "3.5" +pytest-cov==2.10.* +pytest-mock==2.0.*; python_version == "2.7" +pytest-mock==3.2.*; python_version >= "3.5" +python-dateutil==2.8.* +pytz==2019.* +simplejson==3.17.* +sphinx==1.8.*; python_version == "2.7" +sphinx==3.2.*; python_version >= "3.5" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg new file mode 100644 index 00000000000..2a9acf13daa --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py new file mode 100644 index 00000000000..dc4f0e77d54 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +import io + +from setuptools import setup + +with io.open("README.rst", "r", encoding="utf-8") as f: + readme = f.read() + +about = {} +with io.open("arrow/_version.py", "r", encoding="utf-8") as f: + exec(f.read(), about) + +setup( + name="arrow", + version=about["__version__"], + description="Better dates & times for Python", + long_description=readme, + long_description_content_type="text/x-rst", + url="https://arrow.readthedocs.io", + author="Chris Smith", + author_email="crsmithdev@gmail.com", + license="Apache 2.0", + packages=["arrow"], + zip_safe=False, + python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", + install_requires=[ + "python-dateutil>=2.7.0", + "backports.functools_lru_cache>=1.2.1;python_version=='2.7'", + ], + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Topic :: Software Development :: Libraries :: Python Modules", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + ], + keywords="arrow date time datetime timestamp timezone humanize", + project_urls={ + "Repository": "https://github.com/arrow-py/arrow", + "Bug Reports": "https://github.com/arrow-py/arrow/issues", + "Documentation": 
"https://arrow.readthedocs.io", + }, +) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py new file mode 100644 index 00000000000..5bc8a4af2e8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +from datetime import datetime + +import pytest +from dateutil import tz as dateutil_tz + +from arrow import arrow, factory, formatter, locales, parser + + +@pytest.fixture(scope="class") +def time_utcnow(request): + request.cls.arrow = arrow.Arrow.utcnow() + + +@pytest.fixture(scope="class") +def time_2013_01_01(request): + request.cls.now = arrow.Arrow.utcnow() + request.cls.arrow = arrow.Arrow(2013, 1, 1) + request.cls.datetime = datetime(2013, 1, 1) + + +@pytest.fixture(scope="class") +def time_2013_02_03(request): + request.cls.arrow = arrow.Arrow(2013, 2, 3, 12, 30, 45, 1) + + +@pytest.fixture(scope="class") +def time_2013_02_15(request): + request.cls.datetime = datetime(2013, 2, 15, 3, 41, 22, 8923) + request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) + + +@pytest.fixture(scope="class") +def time_1975_12_25(request): + request.cls.datetime = datetime( + 1975, 12, 25, 14, 15, 16, tzinfo=dateutil_tz.gettz("America/New_York") + ) + request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) + + +@pytest.fixture(scope="class") +def arrow_formatter(request): + request.cls.formatter = formatter.DateTimeFormatter() + + +@pytest.fixture(scope="class") +def arrow_factory(request): + request.cls.factory = factory.ArrowFactory() + + +@pytest.fixture(scope="class") +def lang_locales(request): + request.cls.locales = 
locales._locales + + +@pytest.fixture(scope="class") +def lang_locale(request): + # As locale test classes are prefixed with Test, we are dynamically getting the locale by the test class name. + # TestEnglishLocale -> EnglishLocale + name = request.cls.__name__[4:] + request.cls.locale = locales.get_locale_by_class_name(name) + + +@pytest.fixture(scope="class") +def dt_parser(request): + request.cls.parser = parser.DateTimeParser() + + +@pytest.fixture(scope="class") +def dt_parser_regex(request): + request.cls.format_regex = parser.DateTimeParser._FORMAT_RE + + +@pytest.fixture(scope="class") +def tzinfo_parser(request): + request.cls.parser = parser.TzinfoParser() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py new file mode 100644 index 00000000000..9b19a27cd97 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +import arrow + + +class TestModule: + def test_get(self, mocker): + mocker.patch("arrow.api._factory.get", return_value="result") + + assert arrow.api.get() == "result" + + def test_utcnow(self, mocker): + mocker.patch("arrow.api._factory.utcnow", return_value="utcnow") + + assert arrow.api.utcnow() == "utcnow" + + def test_now(self, mocker): + mocker.patch("arrow.api._factory.now", tz="tz", return_value="now") + + assert arrow.api.now("tz") == "now" + + def test_factory(self): + class MockCustomArrowClass(arrow.Arrow): + pass + + result = arrow.api.factory(MockCustomArrowClass) + + assert isinstance(result, arrow.factory.ArrowFactory) + assert isinstance(result.utcnow(), MockCustomArrowClass) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py new file mode 100644 index 00000000000..b0bd20a5e3a --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py @@ -0,0 +1,2150 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import calendar +import pickle +import sys +import time +from datetime import date, datetime, timedelta + +import dateutil +import pytest +import pytz +import simplejson as json +from dateutil import tz +from dateutil.relativedelta import FR, MO, SA, SU, TH, TU, WE + +from arrow import arrow + +from .utils import assert_datetime_equality + + +class TestTestArrowInit: + def test_init_bad_input(self): + + with pytest.raises(TypeError): + arrow.Arrow(2013) + + with pytest.raises(TypeError): + arrow.Arrow(2013, 2) + + with pytest.raises(ValueError): + arrow.Arrow(2013, 2, 2, 12, 30, 45, 9999999) + + def test_init(self): + + result = arrow.Arrow(2013, 2, 2) + self.expected = datetime(2013, 2, 2, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12) + self.expected = datetime(2013, 2, 2, 12, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30) + self.expected = datetime(2013, 2, 2, 12, 30, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30, 45) + self.expected = datetime(2013, 2, 2, 12, 30, 45, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30, 45, 999999) + self.expected = datetime(2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + self.expected = datetime( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == self.expected + + # regression tests for issue #626 + def test_init_pytz_timezone(self): + + result = arrow.Arrow( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=pytz.timezone("Europe/Paris") 
+ ) + self.expected = datetime( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == self.expected + assert_datetime_equality(result._datetime, self.expected, 1) + + def test_init_with_fold(self): + before = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") + after = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1) + + assert hasattr(before, "fold") + assert hasattr(after, "fold") + + # PEP-495 requires the comparisons below to be true + assert before == after + assert before.utcoffset() != after.utcoffset() + + +class TestTestArrowFactory: + def test_now(self): + + result = arrow.Arrow.now() + + assert_datetime_equality( + result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) + ) + + def test_utcnow(self): + + result = arrow.Arrow.utcnow() + + assert_datetime_equality( + result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + assert result.fold == 0 + + def test_fromtimestamp(self): + + timestamp = time.time() + + result = arrow.Arrow.fromtimestamp(timestamp) + assert_datetime_equality( + result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) + ) + + result = arrow.Arrow.fromtimestamp(timestamp, tzinfo=tz.gettz("Europe/Paris")) + assert_datetime_equality( + result._datetime, + datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), + ) + + result = arrow.Arrow.fromtimestamp(timestamp, tzinfo="Europe/Paris") + assert_datetime_equality( + result._datetime, + datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), + ) + + with pytest.raises(ValueError): + arrow.Arrow.fromtimestamp("invalid timestamp") + + def test_utcfromtimestamp(self): + + timestamp = time.time() + + result = arrow.Arrow.utcfromtimestamp(timestamp) + assert_datetime_equality( + result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + with pytest.raises(ValueError): + arrow.Arrow.utcfromtimestamp("invalid timestamp") + + def test_fromdatetime(self): + + dt = datetime(2013, 
2, 3, 12, 30, 45, 1) + + result = arrow.Arrow.fromdatetime(dt) + + assert result._datetime == dt.replace(tzinfo=tz.tzutc()) + + def test_fromdatetime_dt_tzinfo(self): + + dt = datetime(2013, 2, 3, 12, 30, 45, 1, tzinfo=tz.gettz("US/Pacific")) + + result = arrow.Arrow.fromdatetime(dt) + + assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_fromdatetime_tzinfo_arg(self): + + dt = datetime(2013, 2, 3, 12, 30, 45, 1) + + result = arrow.Arrow.fromdatetime(dt, tz.gettz("US/Pacific")) + + assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_fromdate(self): + + dt = date(2013, 2, 3) + + result = arrow.Arrow.fromdate(dt, tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 2, 3, tzinfo=tz.gettz("US/Pacific")) + + def test_strptime(self): + + formatted = datetime(2013, 2, 3, 12, 30, 45).strftime("%Y-%m-%d %H:%M:%S") + + result = arrow.Arrow.strptime(formatted, "%Y-%m-%d %H:%M:%S") + assert result._datetime == datetime(2013, 2, 3, 12, 30, 45, tzinfo=tz.tzutc()) + + result = arrow.Arrow.strptime( + formatted, "%Y-%m-%d %H:%M:%S", tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == datetime( + 2013, 2, 3, 12, 30, 45, tzinfo=tz.gettz("Europe/Paris") + ) + + +@pytest.mark.usefixtures("time_2013_02_03") +class TestTestArrowRepresentation: + def test_repr(self): + + result = self.arrow.__repr__() + + assert result == "".format(self.arrow._datetime.isoformat()) + + def test_str(self): + + result = self.arrow.__str__() + + assert result == self.arrow._datetime.isoformat() + + def test_hash(self): + + result = self.arrow.__hash__() + + assert result == self.arrow._datetime.__hash__() + + def test_format(self): + + result = "{:YYYY-MM-DD}".format(self.arrow) + + assert result == "2013-02-03" + + def test_bare_format(self): + + result = self.arrow.format() + + assert result == "2013-02-03 12:30:45+00:00" + + def test_format_no_format_string(self): + + result = "{}".format(self.arrow) + + assert 
result == str(self.arrow) + + def test_clone(self): + + result = self.arrow.clone() + + assert result is not self.arrow + assert result._datetime == self.arrow._datetime + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowAttribute: + def test_getattr_base(self): + + with pytest.raises(AttributeError): + self.arrow.prop + + def test_getattr_week(self): + + assert self.arrow.week == 1 + + def test_getattr_quarter(self): + # start dates + q1 = arrow.Arrow(2013, 1, 1) + q2 = arrow.Arrow(2013, 4, 1) + q3 = arrow.Arrow(2013, 8, 1) + q4 = arrow.Arrow(2013, 10, 1) + assert q1.quarter == 1 + assert q2.quarter == 2 + assert q3.quarter == 3 + assert q4.quarter == 4 + + # end dates + q1 = arrow.Arrow(2013, 3, 31) + q2 = arrow.Arrow(2013, 6, 30) + q3 = arrow.Arrow(2013, 9, 30) + q4 = arrow.Arrow(2013, 12, 31) + assert q1.quarter == 1 + assert q2.quarter == 2 + assert q3.quarter == 3 + assert q4.quarter == 4 + + def test_getattr_dt_value(self): + + assert self.arrow.year == 2013 + + def test_tzinfo(self): + + self.arrow.tzinfo = tz.gettz("PST") + assert self.arrow.tzinfo == tz.gettz("PST") + + def test_naive(self): + + assert self.arrow.naive == self.arrow._datetime.replace(tzinfo=None) + + def test_timestamp(self): + + assert self.arrow.timestamp == calendar.timegm( + self.arrow._datetime.utctimetuple() + ) + + with pytest.warns(DeprecationWarning): + self.arrow.timestamp + + def test_int_timestamp(self): + + assert self.arrow.int_timestamp == calendar.timegm( + self.arrow._datetime.utctimetuple() + ) + + def test_float_timestamp(self): + + result = self.arrow.float_timestamp - self.arrow.timestamp + + assert result == self.arrow.microsecond + + def test_getattr_fold(self): + + # UTC is always unambiguous + assert self.now.fold == 0 + + ambiguous_dt = arrow.Arrow( + 2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1 + ) + assert ambiguous_dt.fold == 1 + + with pytest.raises(AttributeError): + ambiguous_dt.fold = 0 + + def test_getattr_ambiguous(self): + + 
assert not self.now.ambiguous + + ambiguous_dt = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") + + assert ambiguous_dt.ambiguous + + def test_getattr_imaginary(self): + + assert not self.now.imaginary + + imaginary_dt = arrow.Arrow(2013, 3, 31, 2, 30, tzinfo="Europe/Paris") + + assert imaginary_dt.imaginary + + +@pytest.mark.usefixtures("time_utcnow") +class TestArrowComparison: + def test_eq(self): + + assert self.arrow == self.arrow + assert self.arrow == self.arrow.datetime + assert not (self.arrow == "abc") + + def test_ne(self): + + assert not (self.arrow != self.arrow) + assert not (self.arrow != self.arrow.datetime) + assert self.arrow != "abc" + + def test_gt(self): + + arrow_cmp = self.arrow.shift(minutes=1) + + assert not (self.arrow > self.arrow) + assert not (self.arrow > self.arrow.datetime) + + with pytest.raises(TypeError): + self.arrow > "abc" + + assert self.arrow < arrow_cmp + assert self.arrow < arrow_cmp.datetime + + def test_ge(self): + + with pytest.raises(TypeError): + self.arrow >= "abc" + + assert self.arrow >= self.arrow + assert self.arrow >= self.arrow.datetime + + def test_lt(self): + + arrow_cmp = self.arrow.shift(minutes=1) + + assert not (self.arrow < self.arrow) + assert not (self.arrow < self.arrow.datetime) + + with pytest.raises(TypeError): + self.arrow < "abc" + + assert self.arrow < arrow_cmp + assert self.arrow < arrow_cmp.datetime + + def test_le(self): + + with pytest.raises(TypeError): + self.arrow <= "abc" + + assert self.arrow <= self.arrow + assert self.arrow <= self.arrow.datetime + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowMath: + def test_add_timedelta(self): + + result = self.arrow.__add__(timedelta(days=1)) + + assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) + + def test_add_other(self): + + with pytest.raises(TypeError): + self.arrow + 1 + + def test_radd(self): + + result = self.arrow.__radd__(timedelta(days=1)) + + assert result._datetime == datetime(2013, 1, 
2, tzinfo=tz.tzutc()) + + def test_sub_timedelta(self): + + result = self.arrow.__sub__(timedelta(days=1)) + + assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc()) + + def test_sub_datetime(self): + + result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=11) + + def test_sub_arrow(self): + + result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=11) + + def test_sub_other(self): + + with pytest.raises(TypeError): + self.arrow - object() + + def test_rsub_datetime(self): + + result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=-11) + + def test_rsub_other(self): + + with pytest.raises(TypeError): + timedelta(days=1) - self.arrow + + +@pytest.mark.usefixtures("time_utcnow") +class TestArrowDatetimeInterface: + def test_date(self): + + result = self.arrow.date() + + assert result == self.arrow._datetime.date() + + def test_time(self): + + result = self.arrow.time() + + assert result == self.arrow._datetime.time() + + def test_timetz(self): + + result = self.arrow.timetz() + + assert result == self.arrow._datetime.timetz() + + def test_astimezone(self): + + other_tz = tz.gettz("US/Pacific") + + result = self.arrow.astimezone(other_tz) + + assert result == self.arrow._datetime.astimezone(other_tz) + + def test_utcoffset(self): + + result = self.arrow.utcoffset() + + assert result == self.arrow._datetime.utcoffset() + + def test_dst(self): + + result = self.arrow.dst() + + assert result == self.arrow._datetime.dst() + + def test_timetuple(self): + + result = self.arrow.timetuple() + + assert result == self.arrow._datetime.timetuple() + + def test_utctimetuple(self): + + result = self.arrow.utctimetuple() + + assert result == self.arrow._datetime.utctimetuple() + + def test_toordinal(self): + + result = self.arrow.toordinal() + + assert result == self.arrow._datetime.toordinal() + + def 
test_weekday(self): + + result = self.arrow.weekday() + + assert result == self.arrow._datetime.weekday() + + def test_isoweekday(self): + + result = self.arrow.isoweekday() + + assert result == self.arrow._datetime.isoweekday() + + def test_isocalendar(self): + + result = self.arrow.isocalendar() + + assert result == self.arrow._datetime.isocalendar() + + def test_isoformat(self): + + result = self.arrow.isoformat() + + assert result == self.arrow._datetime.isoformat() + + def test_simplejson(self): + + result = json.dumps({"v": self.arrow.for_json()}, for_json=True) + + assert json.loads(result)["v"] == self.arrow._datetime.isoformat() + + def test_ctime(self): + + result = self.arrow.ctime() + + assert result == self.arrow._datetime.ctime() + + def test_strftime(self): + + result = self.arrow.strftime("%Y") + + assert result == self.arrow._datetime.strftime("%Y") + + +class TestArrowFalsePositiveDst: + """These tests relate to issues #376 and #551. + The key points in both issues are that arrow will assign a UTC timezone if none is provided and + .to() will change other attributes to be correct whereas .replace() only changes the specified attribute. 
+ + Issue 376 + >>> arrow.get('2016-11-06').to('America/New_York').ceil('day') + < Arrow [2016-11-05T23:59:59.999999-04:00] > + + Issue 551 + >>> just_before = arrow.get('2018-11-04T01:59:59.999999') + >>> just_before + 2018-11-04T01:59:59.999999+00:00 + >>> just_after = just_before.shift(microseconds=1) + >>> just_after + 2018-11-04T02:00:00+00:00 + >>> just_before_eastern = just_before.replace(tzinfo='US/Eastern') + >>> just_before_eastern + 2018-11-04T01:59:59.999999-04:00 + >>> just_after_eastern = just_after.replace(tzinfo='US/Eastern') + >>> just_after_eastern + 2018-11-04T02:00:00-05:00 + """ + + def test_dst(self): + self.before_1 = arrow.Arrow( + 2016, 11, 6, 3, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_2 = arrow.Arrow(2016, 11, 6, tzinfo=tz.gettz("America/New_York")) + self.after_1 = arrow.Arrow(2016, 11, 6, 4, tzinfo=tz.gettz("America/New_York")) + self.after_2 = arrow.Arrow( + 2016, 11, 6, 23, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_3 = arrow.Arrow( + 2018, 11, 4, 3, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_4 = arrow.Arrow(2018, 11, 4, tzinfo=tz.gettz("America/New_York")) + self.after_3 = arrow.Arrow(2018, 11, 4, 4, tzinfo=tz.gettz("America/New_York")) + self.after_4 = arrow.Arrow( + 2018, 11, 4, 23, 59, tzinfo=tz.gettz("America/New_York") + ) + assert self.before_1.day == self.before_2.day + assert self.after_1.day == self.after_2.day + assert self.before_3.day == self.before_4.day + assert self.after_3.day == self.after_4.day + + +class TestArrowConversion: + def test_to(self): + + dt_from = datetime.now() + arrow_from = arrow.Arrow.fromdatetime(dt_from, tz.gettz("US/Pacific")) + + self.expected = dt_from.replace(tzinfo=tz.gettz("US/Pacific")).astimezone( + tz.tzutc() + ) + + assert arrow_from.to("UTC").datetime == self.expected + assert arrow_from.to(tz.tzutc()).datetime == self.expected + + # issue #368 + def test_to_pacific_then_utc(self): + result = arrow.Arrow(2018, 11, 4, 1, 
tzinfo="-08:00").to("US/Pacific").to("UTC") + assert result == arrow.Arrow(2018, 11, 4, 9) + + # issue #368 + def test_to_amsterdam_then_utc(self): + result = arrow.Arrow(2016, 10, 30).to("Europe/Amsterdam") + assert result.utcoffset() == timedelta(seconds=7200) + + # regression test for #690 + def test_to_israel_same_offset(self): + + result = arrow.Arrow(2019, 10, 27, 2, 21, 1, tzinfo="+03:00").to("Israel") + expected = arrow.Arrow(2019, 10, 27, 1, 21, 1, tzinfo="Israel") + + assert result == expected + assert result.utcoffset() != expected.utcoffset() + + # issue 315 + def test_anchorage_dst(self): + before = arrow.Arrow(2016, 3, 13, 1, 59, tzinfo="America/Anchorage") + after = arrow.Arrow(2016, 3, 13, 2, 1, tzinfo="America/Anchorage") + + assert before.utcoffset() != after.utcoffset() + + # issue 476 + def test_chicago_fall(self): + + result = arrow.Arrow(2017, 11, 5, 2, 1, tzinfo="-05:00").to("America/Chicago") + expected = arrow.Arrow(2017, 11, 5, 1, 1, tzinfo="America/Chicago") + + assert result == expected + assert result.utcoffset() != expected.utcoffset() + + def test_toronto_gap(self): + + before = arrow.Arrow(2011, 3, 13, 6, 30, tzinfo="UTC").to("America/Toronto") + after = arrow.Arrow(2011, 3, 13, 7, 30, tzinfo="UTC").to("America/Toronto") + + assert before.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 1, 30) + assert after.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 3, 30) + + assert before.utcoffset() != after.utcoffset() + + def test_sydney_gap(self): + + before = arrow.Arrow(2012, 10, 6, 15, 30, tzinfo="UTC").to("Australia/Sydney") + after = arrow.Arrow(2012, 10, 6, 16, 30, tzinfo="UTC").to("Australia/Sydney") + + assert before.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 1, 30) + assert after.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 3, 30) + + assert before.utcoffset() != after.utcoffset() + + +class TestArrowPickling: + def test_pickle_and_unpickle(self): + + dt = arrow.Arrow.utcnow() + + pickled = 
pickle.dumps(dt) + + unpickled = pickle.loads(pickled) + + assert unpickled == dt + + +class TestArrowReplace: + def test_not_attr(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(abc=1) + + def test_replace(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.replace(year=2012) == arrow.Arrow(2012, 5, 5, 12, 30, 45) + assert arw.replace(month=1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) + assert arw.replace(day=1) == arrow.Arrow(2013, 5, 1, 12, 30, 45) + assert arw.replace(hour=1) == arrow.Arrow(2013, 5, 5, 1, 30, 45) + assert arw.replace(minute=1) == arrow.Arrow(2013, 5, 5, 12, 1, 45) + assert arw.replace(second=1) == arrow.Arrow(2013, 5, 5, 12, 30, 1) + + def test_replace_tzinfo(self): + + arw = arrow.Arrow.utcnow().to("US/Eastern") + + result = arw.replace(tzinfo=tz.gettz("US/Pacific")) + + assert result == arw.datetime.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_replace_fold(self): + + before = arrow.Arrow(2017, 11, 5, 1, tzinfo="America/New_York") + after = before.replace(fold=1) + + assert before.fold == 0 + assert after.fold == 1 + assert before == after + assert before.utcoffset() != after.utcoffset() + + def test_replace_fold_and_other(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.replace(fold=1, minute=50) == arrow.Arrow(2013, 5, 5, 12, 50, 45) + assert arw.replace(minute=50, fold=1) == arrow.Arrow(2013, 5, 5, 12, 50, 45) + + def test_replace_week(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(week=1) + + def test_replace_quarter(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(quarter=1) + + def test_replace_quarter_and_fold(self): + with pytest.raises(AttributeError): + arrow.utcnow().replace(fold=1, quarter=1) + + with pytest.raises(AttributeError): + arrow.utcnow().replace(quarter=1, fold=1) + + def test_replace_other_kwargs(self): + + with pytest.raises(AttributeError): + arrow.utcnow().replace(abc="def") + + +class 
TestArrowShift: + def test_not_attr(self): + + now = arrow.Arrow.utcnow() + + with pytest.raises(AttributeError): + now.shift(abc=1) + + with pytest.raises(AttributeError): + now.shift(week=1) + + def test_shift(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.shift(years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45) + assert arw.shift(quarters=1) == arrow.Arrow(2013, 8, 5, 12, 30, 45) + assert arw.shift(quarters=1, months=1) == arrow.Arrow(2013, 9, 5, 12, 30, 45) + assert arw.shift(months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45) + assert arw.shift(weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45) + assert arw.shift(days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45) + assert arw.shift(minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45) + assert arw.shift(seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46) + assert arw.shift(microseconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 45, 1) + + # Remember: Python's weekday 0 is Monday + assert arw.shift(weekday=0) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=1) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=2) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=3) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=4) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=5) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=6) == arw + + with pytest.raises(IndexError): + arw.shift(weekday=7) + + # Use dateutil.relativedelta's convenient day instances + assert arw.shift(weekday=MO) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(0)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(1)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(2)) == arrow.Arrow(2013, 5, 13, 12, 30, 45) + assert arw.shift(weekday=TU) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(0)) == arrow.Arrow(2013, 
5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(1)) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(2)) == arrow.Arrow(2013, 5, 14, 12, 30, 45) + assert arw.shift(weekday=WE) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(0)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(1)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(2)) == arrow.Arrow(2013, 5, 15, 12, 30, 45) + assert arw.shift(weekday=TH) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(0)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(1)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(2)) == arrow.Arrow(2013, 5, 16, 12, 30, 45) + assert arw.shift(weekday=FR) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(0)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(1)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(2)) == arrow.Arrow(2013, 5, 17, 12, 30, 45) + assert arw.shift(weekday=SA) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(0)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(1)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(2)) == arrow.Arrow(2013, 5, 18, 12, 30, 45) + assert arw.shift(weekday=SU) == arw + assert arw.shift(weekday=SU(0)) == arw + assert arw.shift(weekday=SU(1)) == arw + assert arw.shift(weekday=SU(2)) == arrow.Arrow(2013, 5, 12, 12, 30, 45) + + def test_shift_negative(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.shift(years=-1) == arrow.Arrow(2012, 5, 5, 12, 30, 45) + assert arw.shift(quarters=-1) == arrow.Arrow(2013, 2, 5, 12, 30, 45) + assert arw.shift(quarters=-1, months=-1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) + assert arw.shift(months=-1) == arrow.Arrow(2013, 4, 5, 12, 30, 45) + assert arw.shift(weeks=-1) == arrow.Arrow(2013, 4, 28, 12, 30, 45) + assert arw.shift(days=-1) == 
def test_shift_quarters_bug(self):
    """Regression guard for the quarters/keyword-argument interaction bug.

    The value of the last-read keyword argument used to be applied in place
    of the ``quarters`` argument; since keyword-dict iteration order was not
    guaranteed, only certain argument combinations exhibited the failure.
    Pair ``quarters=0`` with each other unit to cover them all.
    """
    base = arrow.Arrow(2013, 5, 5, 12, 30, 45)

    cases = [
        ({"years": 1}, arrow.Arrow(2014, 5, 5, 12, 30, 45)),
        ({"months": 1}, arrow.Arrow(2013, 6, 5, 12, 30, 45)),
        ({"weeks": 1}, arrow.Arrow(2013, 5, 12, 12, 30, 45)),
        ({"days": 1}, arrow.Arrow(2013, 5, 6, 12, 30, 45)),
        ({"hours": 1}, arrow.Arrow(2013, 5, 5, 13, 30, 45)),
        ({"minutes": 1}, arrow.Arrow(2013, 5, 5, 12, 31, 45)),
        ({"seconds": 1}, arrow.Arrow(2013, 5, 5, 12, 30, 46)),
        ({"microseconds": 1}, arrow.Arrow(2013, 5, 5, 12, 30, 45, 1)),
    ]
    for extra, expected in cases:
        assert base.shift(quarters=0, **extra) == expected
@pytest.mark.skipif(
    sys.version_info < (3, 6), reason="unsupported before python 3.6"
)
def test_shift_imaginary_seconds(self):
    """Shifting across an imaginary time keeps working when the UTC offset
    has a seconds component.

    FIX: the method was named ``shift_imaginary_seconds`` (no ``test_``
    prefix), so pytest never collected it and the assertion never ran.
    """
    # Africa/Monrovia's pre-1972 offset is -00:44:30, i.e. it has seconds.
    monrovia = arrow.Arrow(1972, 1, 6, 23, tzinfo="Africa/Monrovia")
    assert monrovia.shift(hours=+1, minutes=+30) == arrow.Arrow(
        1972, 1, 7, 1, 14, 30, tzinfo="Africa/Monrovia"
    )
def test_day(self):
    """A day-by-day range preserves the start's time-of-day on every element."""
    start = datetime(2013, 1, 2, 3, 4, 5)
    end = datetime(2013, 1, 5, 6, 7, 8)

    produced = list(arrow.Arrow.range("day", start, end))

    expected = [arrow.Arrow(2013, 1, day, 3, 4, 5) for day in range(2, 6)]
    assert produced == expected
def test_second(self):
    """A second-by-second range yields every second up to and including the end."""
    produced = list(
        arrow.Arrow.range(
            "second", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 8)
        )
    )

    expected = [arrow.Arrow(2013, 1, 2, 3, 4, sec) for sec in (5, 6, 7, 8)]
    assert produced == expected
utc_range = [t.to("utc") for t in arrow.Arrow.range("hour", before, after)] + + assert len(pacific_range) == len(set(pacific_range)) + assert len(utc_range) == len(set(utc_range)) + + def test_unsupported(self): + + with pytest.raises(AttributeError): + next(arrow.Arrow.range("abc", datetime.utcnow(), datetime.utcnow())) + + def test_range_over_months_ending_on_different_days(self): + # regression test for issue #842 + result = list(arrow.Arrow.range("month", datetime(2015, 1, 31), limit=4)) + assert result == [ + arrow.Arrow(2015, 1, 31), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 31), + arrow.Arrow(2015, 4, 30), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 1, 30), limit=3)) + assert result == [ + arrow.Arrow(2015, 1, 30), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 30), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 2, 28), limit=3)) + assert result == [ + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 28), + arrow.Arrow(2015, 4, 28), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 3, 31), limit=3)) + assert result == [ + arrow.Arrow(2015, 3, 31), + arrow.Arrow(2015, 4, 30), + arrow.Arrow(2015, 5, 31), + ] + + def test_range_over_quarter_months_ending_on_different_days(self): + result = list(arrow.Arrow.range("quarter", datetime(2014, 11, 30), limit=3)) + assert result == [ + arrow.Arrow(2014, 11, 30), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 5, 30), + ] + + def test_range_over_year_maintains_end_date_across_leap_year(self): + result = list(arrow.Arrow.range("year", datetime(2012, 2, 29), limit=5)) + assert result == [ + arrow.Arrow(2012, 2, 29), + arrow.Arrow(2013, 2, 28), + arrow.Arrow(2014, 2, 28), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2016, 2, 29), + ] + + +class TestArrowSpanRange: + def test_year(self): + + result = list( + arrow.Arrow.span_range("year", datetime(2013, 2, 1), datetime(2016, 3, 31)) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1), + arrow.Arrow(2013, 12, 
def test_quarter(self):
    """span_range over quarters floors to quarter starts and ceils to quarter ends."""
    produced = list(
        arrow.Arrow.span_range("quarter", datetime(2013, 2, 2), datetime(2013, 5, 15))
    )

    first_quarter = (
        arrow.Arrow(2013, 1, 1),
        arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999),
    )
    second_quarter = (
        arrow.Arrow(2013, 4, 1),
        arrow.Arrow(2013, 6, 30, 23, 59, 59, 999999),
    )
    assert produced == [first_quarter, second_quarter]
23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 4, 0), + arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), + ), + ] + + def test_days(self): + + result = list( + arrow.Arrow.span_range( + "days", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0), + arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 2, 0), + arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 3, 0), + arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 4, 0), + arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), + ), + ] + + def test_hour(self): + + result = list( + arrow.Arrow.span_range( + "hour", datetime(2013, 1, 1, 0, 30), datetime(2013, 1, 1, 3, 30) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0), + arrow.Arrow(2013, 1, 1, 0, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 1), + arrow.Arrow(2013, 1, 1, 1, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 2), + arrow.Arrow(2013, 1, 1, 2, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 3), + arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999), + ), + ] + + result = list( + arrow.Arrow.span_range( + "hour", datetime(2013, 1, 1, 3, 30), datetime(2013, 1, 1, 3, 30) + ) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999)) + ] + + def test_minute(self): + + result = list( + arrow.Arrow.span_range( + "minute", datetime(2013, 1, 1, 0, 0, 30), datetime(2013, 1, 1, 0, 3, 30) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0, 0), + arrow.Arrow(2013, 1, 1, 0, 0, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 1), + arrow.Arrow(2013, 1, 1, 0, 1, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 2), + arrow.Arrow(2013, 1, 1, 0, 2, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 3), + arrow.Arrow(2013, 1, 1, 0, 3, 59, 999999), + ), + ] + + def test_second(self): + + result = list( + arrow.Arrow.span_range( + "second", datetime(2013, 1, 1), 
def test_aware_tz(self):
    """An explicit ``tz`` argument overrides the tzinfo carried by the inputs."""
    spans = arrow.Arrow.span_range(
        "hour",
        datetime(2013, 1, 1, 0, tzinfo=tz.gettz("US/Eastern")),
        datetime(2013, 1, 1, 2, 59, tzinfo=tz.gettz("US/Eastern")),
        tz="US/Central",
    )

    central = tz.gettz("US/Central")
    for span_floor, span_ceil in spans:
        assert span_floor.tzinfo == central
        assert span_ceil.tzinfo == central
def test_incorrect_input(self):
    """An interval count below 1 is rejected with ValueError."""
    start = datetime(2013, 1, 2)
    end = datetime(2013, 4, 15)

    with pytest.raises(ValueError):
        # list() forces the generator so the validation actually fires.
        list(arrow.Arrow.interval("month", start, end, 0))
def test_span_day(self):
    """Day span: midnight through the last microsecond of 2013-02-15 (UTC fixture)."""
    utc = tz.tzutc()
    day_start, day_end = self.arrow.span("day")

    assert day_start == datetime(2013, 2, 15, tzinfo=utc)
    assert day_end == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=utc)
floor, ceil = self.arrow.span("hour", bounds="(]") + + assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc()) + + def test_span_exclusive_exclusive(self): + + floor, ceil = self.arrow.span("hour", bounds="()") + + assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_bounds_are_validated(self): + + with pytest.raises(ValueError): + floor, ceil = self.arrow.span("hour", bounds="][") + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowHumanize: + def test_granularity(self): + + assert self.now.humanize(granularity="second") == "just now" + + later1 = self.now.shift(seconds=1) + assert self.now.humanize(later1, granularity="second") == "just now" + assert later1.humanize(self.now, granularity="second") == "just now" + assert self.now.humanize(later1, granularity="minute") == "0 minutes ago" + assert later1.humanize(self.now, granularity="minute") == "in 0 minutes" + + later100 = self.now.shift(seconds=100) + assert self.now.humanize(later100, granularity="second") == "100 seconds ago" + assert later100.humanize(self.now, granularity="second") == "in 100 seconds" + assert self.now.humanize(later100, granularity="minute") == "a minute ago" + assert later100.humanize(self.now, granularity="minute") == "in a minute" + assert self.now.humanize(later100, granularity="hour") == "0 hours ago" + assert later100.humanize(self.now, granularity="hour") == "in 0 hours" + + later4000 = self.now.shift(seconds=4000) + assert self.now.humanize(later4000, granularity="minute") == "66 minutes ago" + assert later4000.humanize(self.now, granularity="minute") == "in 66 minutes" + assert self.now.humanize(later4000, granularity="hour") == "an hour ago" + assert later4000.humanize(self.now, granularity="hour") == "in an hour" + assert self.now.humanize(later4000, granularity="day") == "0 days ago" + 
assert later4000.humanize(self.now, granularity="day") == "in 0 days" + + later105 = self.now.shift(seconds=10 ** 5) + assert self.now.humanize(later105, granularity="hour") == "27 hours ago" + assert later105.humanize(self.now, granularity="hour") == "in 27 hours" + assert self.now.humanize(later105, granularity="day") == "a day ago" + assert later105.humanize(self.now, granularity="day") == "in a day" + assert self.now.humanize(later105, granularity="week") == "0 weeks ago" + assert later105.humanize(self.now, granularity="week") == "in 0 weeks" + assert self.now.humanize(later105, granularity="month") == "0 months ago" + assert later105.humanize(self.now, granularity="month") == "in 0 months" + assert self.now.humanize(later105, granularity=["month"]) == "0 months ago" + assert later105.humanize(self.now, granularity=["month"]) == "in 0 months" + + later106 = self.now.shift(seconds=3 * 10 ** 6) + assert self.now.humanize(later106, granularity="day") == "34 days ago" + assert later106.humanize(self.now, granularity="day") == "in 34 days" + assert self.now.humanize(later106, granularity="week") == "4 weeks ago" + assert later106.humanize(self.now, granularity="week") == "in 4 weeks" + assert self.now.humanize(later106, granularity="month") == "a month ago" + assert later106.humanize(self.now, granularity="month") == "in a month" + assert self.now.humanize(later106, granularity="year") == "0 years ago" + assert later106.humanize(self.now, granularity="year") == "in 0 years" + + later506 = self.now.shift(seconds=50 * 10 ** 6) + assert self.now.humanize(later506, granularity="week") == "82 weeks ago" + assert later506.humanize(self.now, granularity="week") == "in 82 weeks" + assert self.now.humanize(later506, granularity="month") == "18 months ago" + assert later506.humanize(self.now, granularity="month") == "in 18 months" + assert self.now.humanize(later506, granularity="year") == "a year ago" + assert later506.humanize(self.now, granularity="year") == "in a year" + 
+ later108 = self.now.shift(seconds=10 ** 8) + assert self.now.humanize(later108, granularity="year") == "3 years ago" + assert later108.humanize(self.now, granularity="year") == "in 3 years" + + later108onlydistance = self.now.shift(seconds=10 ** 8) + assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity="year" + ) + == "3 years" + ) + assert ( + later108onlydistance.humanize( + self.now, only_distance=True, granularity="year" + ) + == "3 years" + ) + + with pytest.raises(AttributeError): + self.now.humanize(later108, granularity="years") + + def test_multiple_granularity(self): + assert self.now.humanize(granularity="second") == "just now" + assert self.now.humanize(granularity=["second"]) == "just now" + assert ( + self.now.humanize(granularity=["year", "month", "day", "hour", "second"]) + == "in 0 years 0 months 0 days 0 hours and 0 seconds" + ) + + later4000 = self.now.shift(seconds=4000) + assert ( + later4000.humanize(self.now, granularity=["hour", "minute"]) + == "in an hour and 6 minutes" + ) + assert ( + self.now.humanize(later4000, granularity=["hour", "minute"]) + == "an hour and 6 minutes ago" + ) + assert ( + later4000.humanize( + self.now, granularity=["hour", "minute"], only_distance=True + ) + == "an hour and 6 minutes" + ) + assert ( + later4000.humanize(self.now, granularity=["day", "hour", "minute"]) + == "in 0 days an hour and 6 minutes" + ) + assert ( + self.now.humanize(later4000, granularity=["day", "hour", "minute"]) + == "0 days an hour and 6 minutes ago" + ) + + later105 = self.now.shift(seconds=10 ** 5) + assert ( + self.now.humanize(later105, granularity=["hour", "day", "minute"]) + == "a day 3 hours and 46 minutes ago" + ) + with pytest.raises(AttributeError): + self.now.humanize(later105, granularity=["error", "second"]) + + later108onlydistance = self.now.shift(seconds=10 ** 8) + assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity=["year"] + ) + == "3 years" + ) + 
def test_minutes(self):
    """Two minutes humanizes as plural, in both directions and distance-only."""
    two_min_later = self.now.shift(minutes=2)

    assert self.now.humanize(two_min_later) == "2 minutes ago"
    assert two_min_later.humanize(self.now) == "in 2 minutes"

    # only_distance drops the ago/in framing in both directions.
    for a, b in ((self.now, two_min_later), (two_min_later, self.now)):
        assert a.humanize(b, only_distance=True) == "2 minutes"
self.now.shift(hours=2) + + assert self.now.humanize(later) == "2 hours ago" + assert later.humanize(self.now) == "in 2 hours" + + assert self.now.humanize(later, only_distance=True) == "2 hours" + assert later.humanize(self.now, only_distance=True) == "2 hours" + + def test_day(self): + + later = self.now.shift(days=1) + + assert self.now.humanize(later) == "a day ago" + assert later.humanize(self.now) == "in a day" + + # regression test for issue #697 + less_than_48_hours = self.now.shift( + days=1, hours=23, seconds=59, microseconds=999999 + ) + assert self.now.humanize(less_than_48_hours) == "a day ago" + assert less_than_48_hours.humanize(self.now) == "in a day" + + less_than_48_hours_date = less_than_48_hours._datetime.date() + with pytest.raises(TypeError): + # humanize other argument does not take raw datetime.date objects + self.now.humanize(less_than_48_hours_date) + + # convert from date to arrow object + less_than_48_hours_date = arrow.Arrow.fromdate(less_than_48_hours_date) + assert self.now.humanize(less_than_48_hours_date) == "a day ago" + assert less_than_48_hours_date.humanize(self.now) == "in a day" + + assert self.now.humanize(later, only_distance=True) == "a day" + assert later.humanize(self.now, only_distance=True) == "a day" + + def test_days(self): + + later = self.now.shift(days=2) + + assert self.now.humanize(later) == "2 days ago" + assert later.humanize(self.now) == "in 2 days" + + assert self.now.humanize(later, only_distance=True) == "2 days" + assert later.humanize(self.now, only_distance=True) == "2 days" + + # Regression tests for humanize bug referenced in issue 541 + later = self.now.shift(days=3) + assert later.humanize(self.now) == "in 3 days" + + later = self.now.shift(days=3, seconds=1) + assert later.humanize(self.now) == "in 3 days" + + later = self.now.shift(days=4) + assert later.humanize(self.now) == "in 4 days" + + def test_week(self): + + later = self.now.shift(weeks=1) + + assert self.now.humanize(later) == "a week ago" 
+ assert later.humanize(self.now) == "in a week" + + assert self.now.humanize(later, only_distance=True) == "a week" + assert later.humanize(self.now, only_distance=True) == "a week" + + def test_weeks(self): + + later = self.now.shift(weeks=2) + + assert self.now.humanize(later) == "2 weeks ago" + assert later.humanize(self.now) == "in 2 weeks" + + assert self.now.humanize(later, only_distance=True) == "2 weeks" + assert later.humanize(self.now, only_distance=True) == "2 weeks" + + def test_month(self): + + later = self.now.shift(months=1) + + assert self.now.humanize(later) == "a month ago" + assert later.humanize(self.now) == "in a month" + + assert self.now.humanize(later, only_distance=True) == "a month" + assert later.humanize(self.now, only_distance=True) == "a month" + + def test_months(self): + + later = self.now.shift(months=2) + earlier = self.now.shift(months=-2) + + assert earlier.humanize(self.now) == "2 months ago" + assert later.humanize(self.now) == "in 2 months" + + assert self.now.humanize(later, only_distance=True) == "2 months" + assert later.humanize(self.now, only_distance=True) == "2 months" + + def test_year(self): + + later = self.now.shift(years=1) + + assert self.now.humanize(later) == "a year ago" + assert later.humanize(self.now) == "in a year" + + assert self.now.humanize(later, only_distance=True) == "a year" + assert later.humanize(self.now, only_distance=True) == "a year" + + def test_years(self): + + later = self.now.shift(years=2) + + assert self.now.humanize(later) == "2 years ago" + assert later.humanize(self.now) == "in 2 years" + + assert self.now.humanize(later, only_distance=True) == "2 years" + assert later.humanize(self.now, only_distance=True) == "2 years" + + arw = arrow.Arrow(2014, 7, 2) + + result = arw.humanize(self.datetime) + + assert result == "in 2 years" + + def test_arrow(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + result = arw.humanize(arrow.Arrow.fromdatetime(self.datetime)) + + assert result 
== "just now" + + def test_datetime_tzinfo(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + result = arw.humanize(self.datetime.replace(tzinfo=tz.tzutc())) + + assert result == "just now" + + def test_other(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + with pytest.raises(TypeError): + arw.humanize(object()) + + def test_invalid_locale(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + with pytest.raises(ValueError): + arw.humanize(locale="klingon") + + def test_none(self): + + arw = arrow.Arrow.utcnow() + + result = arw.humanize() + + assert result == "just now" + + result = arw.humanize(None) + + assert result == "just now" + + def test_untranslated_granularity(self, mocker): + + arw = arrow.Arrow.utcnow() + later = arw.shift(weeks=1) + + # simulate an untranslated timeframe key + mocker.patch.dict("arrow.locales.EnglishLocale.timeframes") + del arrow.locales.EnglishLocale.timeframes["week"] + with pytest.raises(ValueError): + arw.humanize(later, granularity="week") + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowHumanizeTestsWithLocale: + def test_now(self): + + arw = arrow.Arrow(2013, 1, 1, 0, 0, 0) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "сейчас" + + def test_seconds(self): + arw = arrow.Arrow(2013, 1, 1, 0, 0, 44) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "через 44 несколько секунд" + + def test_years(self): + + arw = arrow.Arrow(2011, 7, 2) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "2 года назад" + + +class TestArrowIsBetween: + def test_start_before_end(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + result = target.is_between(start, end) + assert not result + + def test_exclusive_exclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 27)) + 
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 10)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 36)) + result = target.is_between(start, end, "()") + assert result + result = target.is_between(start, end) + assert result + + def test_exclusive_exclusive_bounds_same_date(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "()") + assert not result + + def test_inclusive_exclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 4)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) + result = target.is_between(start, end, "[)") + assert not result + + def test_exclusive_inclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "(]") + assert result + + def test_inclusive_inclusive_bounds_same_date(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "[]") + assert result + + def test_type_error_exception(self): + with pytest.raises(TypeError): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = datetime(2013, 5, 5) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + target.is_between(start, end) + + with pytest.raises(TypeError): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = datetime(2013, 5, 8) + target.is_between(start, end) + + with pytest.raises(TypeError): + target.is_between(None, None) + + def test_value_error_exception(self): + target = 
arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + with pytest.raises(ValueError): + target.is_between(start, end, "][") + with pytest.raises(ValueError): + target.is_between(start, end, "") + with pytest.raises(ValueError): + target.is_between(start, end, "]") + with pytest.raises(ValueError): + target.is_between(start, end, "[") + with pytest.raises(ValueError): + target.is_between(start, end, "hello") + + +class TestArrowUtil: + def test_get_datetime(self): + + get_datetime = arrow.Arrow._get_datetime + + arw = arrow.Arrow.utcnow() + dt = datetime.utcnow() + timestamp = time.time() + + assert get_datetime(arw) == arw.datetime + assert get_datetime(dt) == dt + assert ( + get_datetime(timestamp) == arrow.Arrow.utcfromtimestamp(timestamp).datetime + ) + + with pytest.raises(ValueError) as raise_ctx: + get_datetime("abc") + assert "not recognized as a datetime or timestamp" in str(raise_ctx.value) + + def test_get_tzinfo(self): + + get_tzinfo = arrow.Arrow._get_tzinfo + + with pytest.raises(ValueError) as raise_ctx: + get_tzinfo("abc") + assert "not recognized as a timezone" in str(raise_ctx.value) + + def test_get_iteration_params(self): + + assert arrow.Arrow._get_iteration_params("end", None) == ("end", sys.maxsize) + assert arrow.Arrow._get_iteration_params(None, 100) == (arrow.Arrow.max, 100) + assert arrow.Arrow._get_iteration_params(100, 120) == (100, 120) + + with pytest.raises(ValueError): + arrow.Arrow._get_iteration_params(None, None) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py new file mode 100644 index 00000000000..2b8df5168ff --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +import time +from datetime import date, 
datetime + +import pytest +from dateutil import tz + +from arrow.parser import ParserError + +from .utils import assert_datetime_equality + + +@pytest.mark.usefixtures("arrow_factory") +class TestGet: + def test_no_args(self): + + assert_datetime_equality( + self.factory.get(), datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + def test_timestamp_one_arg_no_arg(self): + + no_arg = self.factory.get(1406430900).timestamp + one_arg = self.factory.get("1406430900", "X").timestamp + + assert no_arg == one_arg + + def test_one_arg_none(self): + + assert_datetime_equality( + self.factory.get(None), datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + def test_struct_time(self): + + assert_datetime_equality( + self.factory.get(time.gmtime()), + datetime.utcnow().replace(tzinfo=tz.tzutc()), + ) + + def test_one_arg_timestamp(self): + + int_timestamp = int(time.time()) + timestamp_dt = datetime.utcfromtimestamp(int_timestamp).replace( + tzinfo=tz.tzutc() + ) + + assert self.factory.get(int_timestamp) == timestamp_dt + + with pytest.raises(ParserError): + self.factory.get(str(int_timestamp)) + + float_timestamp = time.time() + timestamp_dt = datetime.utcfromtimestamp(float_timestamp).replace( + tzinfo=tz.tzutc() + ) + + assert self.factory.get(float_timestamp) == timestamp_dt + + with pytest.raises(ParserError): + self.factory.get(str(float_timestamp)) + + # Regression test for issue #216 + # Python 3 raises OverflowError, Python 2 raises ValueError + timestamp = 99999999999999999999999999.99999999999999999999999999 + with pytest.raises((OverflowError, ValueError)): + self.factory.get(timestamp) + + def test_one_arg_expanded_timestamp(self): + + millisecond_timestamp = 1591328104308 + microsecond_timestamp = 1591328104308505 + + # Regression test for issue #796 + assert self.factory.get(millisecond_timestamp) == datetime.utcfromtimestamp( + 1591328104.308 + ).replace(tzinfo=tz.tzutc()) + assert self.factory.get(microsecond_timestamp) == datetime.utcfromtimestamp( + 
1591328104.308505 + ).replace(tzinfo=tz.tzutc()) + + def test_one_arg_timestamp_with_tzinfo(self): + + timestamp = time.time() + timestamp_dt = datetime.fromtimestamp(timestamp, tz=tz.tzutc()).astimezone( + tz.gettz("US/Pacific") + ) + timezone = tz.gettz("US/Pacific") + + assert_datetime_equality( + self.factory.get(timestamp, tzinfo=timezone), timestamp_dt + ) + + def test_one_arg_arrow(self): + + arw = self.factory.utcnow() + result = self.factory.get(arw) + + assert arw == result + + def test_one_arg_datetime(self): + + dt = datetime.utcnow().replace(tzinfo=tz.tzutc()) + + assert self.factory.get(dt) == dt + + def test_one_arg_date(self): + + d = date.today() + dt = datetime(d.year, d.month, d.day, tzinfo=tz.tzutc()) + + assert self.factory.get(d) == dt + + def test_one_arg_tzinfo(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality( + self.factory.get(tz.gettz("US/Pacific")), self.expected + ) + + # regression test for issue #658 + def test_one_arg_dateparser_datetime(self): + dateparser = pytest.importorskip("dateparser") + expected = datetime(1990, 1, 1).replace(tzinfo=tz.tzutc()) + # dateparser outputs: datetime.datetime(1990, 1, 1, 0, 0, tzinfo=) + parsed_date = dateparser.parse("1990-01-01T00:00:00+00:00") + dt_output = self.factory.get(parsed_date)._datetime.replace(tzinfo=tz.tzutc()) + assert dt_output == expected + + def test_kwarg_tzinfo(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality( + self.factory.get(tzinfo=tz.gettz("US/Pacific")), self.expected + ) + + def test_kwarg_tzinfo_string(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality(self.factory.get(tzinfo="US/Pacific"), self.expected) + + with pytest.raises(ParserError): + 
self.factory.get(tzinfo="US/PacificInvalidTzinfo") + + def test_kwarg_normalize_whitespace(self): + result = self.factory.get( + "Jun 1 2005 1:33PM", + "MMM D YYYY H:mmA", + tzinfo=tz.tzutc(), + normalize_whitespace=True, + ) + assert result._datetime == datetime(2005, 6, 1, 13, 33, tzinfo=tz.tzutc()) + + result = self.factory.get( + "\t 2013-05-05T12:30:45.123456 \t \n", + tzinfo=tz.tzutc(), + normalize_whitespace=True, + ) + assert result._datetime == datetime( + 2013, 5, 5, 12, 30, 45, 123456, tzinfo=tz.tzutc() + ) + + def test_one_arg_iso_str(self): + + dt = datetime.utcnow() + + assert_datetime_equality( + self.factory.get(dt.isoformat()), dt.replace(tzinfo=tz.tzutc()) + ) + + def test_one_arg_iso_calendar(self): + + pairs = [ + (datetime(2004, 1, 4), (2004, 1, 7)), + (datetime(2008, 12, 30), (2009, 1, 2)), + (datetime(2010, 1, 2), (2009, 53, 6)), + (datetime(2000, 2, 29), (2000, 9, 2)), + (datetime(2005, 1, 1), (2004, 53, 6)), + (datetime(2010, 1, 4), (2010, 1, 1)), + (datetime(2010, 1, 3), (2009, 53, 7)), + (datetime(2003, 12, 29), (2004, 1, 1)), + ] + + for pair in pairs: + dt, iso = pair + assert self.factory.get(iso) == self.factory.get(dt) + + with pytest.raises(TypeError): + self.factory.get((2014, 7, 1, 4)) + + with pytest.raises(TypeError): + self.factory.get((2014, 7)) + + with pytest.raises(ValueError): + self.factory.get((2014, 70, 1)) + + with pytest.raises(ValueError): + self.factory.get((2014, 7, 10)) + + def test_one_arg_other(self): + + with pytest.raises(TypeError): + self.factory.get(object()) + + def test_one_arg_bool(self): + + with pytest.raises(TypeError): + self.factory.get(False) + + with pytest.raises(TypeError): + self.factory.get(True) + + def test_two_args_datetime_tzinfo(self): + + result = self.factory.get(datetime(2013, 1, 1), tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_datetime_tz_str(self): + + result = self.factory.get(datetime(2013, 1, 1), 
"US/Pacific") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_date_tzinfo(self): + + result = self.factory.get(date(2013, 1, 1), tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_date_tz_str(self): + + result = self.factory.get(date(2013, 1, 1), "US/Pacific") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_datetime_other(self): + + with pytest.raises(TypeError): + self.factory.get(datetime.utcnow(), object()) + + def test_two_args_date_other(self): + + with pytest.raises(TypeError): + self.factory.get(date.today(), object()) + + def test_two_args_str_str(self): + + result = self.factory.get("2013-01-01", "YYYY-MM-DD") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_str_tzinfo(self): + + result = self.factory.get("2013-01-01", tzinfo=tz.gettz("US/Pacific")) + + assert_datetime_equality( + result._datetime, datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + ) + + def test_two_args_twitter_format(self): + + # format returned by twitter API for created_at: + twitter_date = "Fri Apr 08 21:08:54 +0000 2016" + result = self.factory.get(twitter_date, "ddd MMM DD HH:mm:ss Z YYYY") + + assert result._datetime == datetime(2016, 4, 8, 21, 8, 54, tzinfo=tz.tzutc()) + + def test_two_args_str_list(self): + + result = self.factory.get("2013-01-01", ["MM/DD/YYYY", "YYYY-MM-DD"]) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_unicode_unicode(self): + + result = self.factory.get(u"2013-01-01", u"YYYY-MM-DD") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_other(self): + + with pytest.raises(TypeError): + self.factory.get(object(), object()) + + def test_three_args_with_tzinfo(self): + + timefmt = "YYYYMMDD" + d = "20150514" + + assert self.factory.get(d, 
timefmt, tzinfo=tz.tzlocal()) == datetime( + 2015, 5, 14, tzinfo=tz.tzlocal() + ) + + def test_three_args(self): + + assert self.factory.get(2013, 1, 1) == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_full_kwargs(self): + + assert ( + self.factory.get( + year=2016, + month=7, + day=14, + hour=7, + minute=16, + second=45, + microsecond=631092, + ) + == datetime(2016, 7, 14, 7, 16, 45, 631092, tzinfo=tz.tzutc()) + ) + + def test_three_kwargs(self): + + assert self.factory.get(year=2016, month=7, day=14) == datetime( + 2016, 7, 14, 0, 0, tzinfo=tz.tzutc() + ) + + def test_tzinfo_string_kwargs(self): + result = self.factory.get("2019072807", "YYYYMMDDHH", tzinfo="UTC") + assert result._datetime == datetime(2019, 7, 28, 7, 0, 0, 0, tzinfo=tz.tzutc()) + + def test_insufficient_kwargs(self): + + with pytest.raises(TypeError): + self.factory.get(year=2016) + + with pytest.raises(TypeError): + self.factory.get(year=2016, month=7) + + def test_locale(self): + result = self.factory.get("2010", "YYYY", locale="ja") + assert result._datetime == datetime(2010, 1, 1, 0, 0, 0, 0, tzinfo=tz.tzutc()) + + # regression test for issue #701 + result = self.factory.get( + "Montag, 9. September 2019, 16:15-20:00", "dddd, D. 
MMMM YYYY", locale="de" + ) + assert result._datetime == datetime(2019, 9, 9, 0, 0, 0, 0, tzinfo=tz.tzutc()) + + def test_locale_kwarg_only(self): + res = self.factory.get(locale="ja") + assert res.tzinfo == tz.tzutc() + + def test_locale_with_tzinfo(self): + res = self.factory.get(locale="ja", tzinfo=tz.gettz("Asia/Tokyo")) + assert res.tzinfo == tz.gettz("Asia/Tokyo") + + +@pytest.mark.usefixtures("arrow_factory") +class TestUtcNow: + def test_utcnow(self): + + assert_datetime_equality( + self.factory.utcnow()._datetime, + datetime.utcnow().replace(tzinfo=tz.tzutc()), + ) + + +@pytest.mark.usefixtures("arrow_factory") +class TestNow: + def test_no_tz(self): + + assert_datetime_equality(self.factory.now(), datetime.now(tz.tzlocal())) + + def test_tzinfo(self): + + assert_datetime_equality( + self.factory.now(tz.gettz("EST")), datetime.now(tz.gettz("EST")) + ) + + def test_tz_str(self): + + assert_datetime_equality(self.factory.now("EST"), datetime.now(tz.gettz("EST"))) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py new file mode 100644 index 00000000000..e97aeb5dcc7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +from datetime import datetime + +import pytest +import pytz +from dateutil import tz as dateutil_tz + +from arrow import ( + FORMAT_ATOM, + FORMAT_COOKIE, + FORMAT_RFC822, + FORMAT_RFC850, + FORMAT_RFC1036, + FORMAT_RFC1123, + FORMAT_RFC2822, + FORMAT_RFC3339, + FORMAT_RSS, + FORMAT_W3C, +) + +from .utils import make_full_tz_list + + +@pytest.mark.usefixtures("arrow_formatter") +class TestFormatterFormatToken: + def test_format(self): + + dt = datetime(2013, 2, 5, 12, 32, 51) + + result = self.formatter.format(dt, "MM-DD-YYYY hh:mm:ss a") + + assert result == "02-05-2013 12:32:51 pm" + + def test_year(self): + + dt = 
datetime(2013, 1, 1) + assert self.formatter._format_token(dt, "YYYY") == "2013" + assert self.formatter._format_token(dt, "YY") == "13" + + def test_month(self): + + dt = datetime(2013, 1, 1) + assert self.formatter._format_token(dt, "MMMM") == "January" + assert self.formatter._format_token(dt, "MMM") == "Jan" + assert self.formatter._format_token(dt, "MM") == "01" + assert self.formatter._format_token(dt, "M") == "1" + + def test_day(self): + + dt = datetime(2013, 2, 1) + assert self.formatter._format_token(dt, "DDDD") == "032" + assert self.formatter._format_token(dt, "DDD") == "32" + assert self.formatter._format_token(dt, "DD") == "01" + assert self.formatter._format_token(dt, "D") == "1" + assert self.formatter._format_token(dt, "Do") == "1st" + + assert self.formatter._format_token(dt, "dddd") == "Friday" + assert self.formatter._format_token(dt, "ddd") == "Fri" + assert self.formatter._format_token(dt, "d") == "5" + + def test_hour(self): + + dt = datetime(2013, 1, 1, 2) + assert self.formatter._format_token(dt, "HH") == "02" + assert self.formatter._format_token(dt, "H") == "2" + + dt = datetime(2013, 1, 1, 13) + assert self.formatter._format_token(dt, "HH") == "13" + assert self.formatter._format_token(dt, "H") == "13" + + dt = datetime(2013, 1, 1, 2) + assert self.formatter._format_token(dt, "hh") == "02" + assert self.formatter._format_token(dt, "h") == "2" + + dt = datetime(2013, 1, 1, 13) + assert self.formatter._format_token(dt, "hh") == "01" + assert self.formatter._format_token(dt, "h") == "1" + + # test that 12-hour time converts to '12' at midnight + dt = datetime(2013, 1, 1, 0) + assert self.formatter._format_token(dt, "hh") == "12" + assert self.formatter._format_token(dt, "h") == "12" + + def test_minute(self): + + dt = datetime(2013, 1, 1, 0, 1) + assert self.formatter._format_token(dt, "mm") == "01" + assert self.formatter._format_token(dt, "m") == "1" + + def test_second(self): + + dt = datetime(2013, 1, 1, 0, 0, 1) + assert 
self.formatter._format_token(dt, "ss") == "01" + assert self.formatter._format_token(dt, "s") == "1" + + def test_sub_second(self): + + dt = datetime(2013, 1, 1, 0, 0, 0, 123456) + assert self.formatter._format_token(dt, "SSSSSS") == "123456" + assert self.formatter._format_token(dt, "SSSSS") == "12345" + assert self.formatter._format_token(dt, "SSSS") == "1234" + assert self.formatter._format_token(dt, "SSS") == "123" + assert self.formatter._format_token(dt, "SS") == "12" + assert self.formatter._format_token(dt, "S") == "1" + + dt = datetime(2013, 1, 1, 0, 0, 0, 2000) + assert self.formatter._format_token(dt, "SSSSSS") == "002000" + assert self.formatter._format_token(dt, "SSSSS") == "00200" + assert self.formatter._format_token(dt, "SSSS") == "0020" + assert self.formatter._format_token(dt, "SSS") == "002" + assert self.formatter._format_token(dt, "SS") == "00" + assert self.formatter._format_token(dt, "S") == "0" + + def test_timestamp(self): + + timestamp = 1588437009.8952794 + dt = datetime.utcfromtimestamp(timestamp) + expected = str(int(timestamp)) + assert self.formatter._format_token(dt, "X") == expected + + # Must round because time.time() may return a float with greater + # than 6 digits of precision + expected = str(int(timestamp * 1000000)) + assert self.formatter._format_token(dt, "x") == expected + + def test_timezone(self): + + dt = datetime.utcnow().replace(tzinfo=dateutil_tz.gettz("US/Pacific")) + + result = self.formatter._format_token(dt, "ZZ") + assert result == "-07:00" or result == "-08:00" + + result = self.formatter._format_token(dt, "Z") + assert result == "-0700" or result == "-0800" + + @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) + def test_timezone_formatter(self, full_tz_name): + + # This test will fail if we use "now" as date as soon as we change from/to DST + dt = datetime(1986, 2, 14, tzinfo=pytz.timezone("UTC")).replace( + tzinfo=dateutil_tz.gettz(full_tz_name) + ) + abbreviation = dt.tzname() + + result = 
self.formatter._format_token(dt, "ZZZ") + assert result == abbreviation + + def test_am_pm(self): + + dt = datetime(2012, 1, 1, 11) + assert self.formatter._format_token(dt, "a") == "am" + assert self.formatter._format_token(dt, "A") == "AM" + + dt = datetime(2012, 1, 1, 13) + assert self.formatter._format_token(dt, "a") == "pm" + assert self.formatter._format_token(dt, "A") == "PM" + + def test_week(self): + dt = datetime(2017, 5, 19) + assert self.formatter._format_token(dt, "W") == "2017-W20-5" + + # make sure week is zero padded when needed + dt_early = datetime(2011, 1, 20) + assert self.formatter._format_token(dt_early, "W") == "2011-W03-4" + + def test_nonsense(self): + dt = datetime(2012, 1, 1, 11) + assert self.formatter._format_token(dt, None) is None + assert self.formatter._format_token(dt, "NONSENSE") is None + + def test_escape(self): + + assert ( + self.formatter.format( + datetime(2015, 12, 10, 17, 9), "MMMM D, YYYY [at] h:mma" + ) + == "December 10, 2015 at 5:09pm" + ) + + assert ( + self.formatter.format( + datetime(2015, 12, 10, 17, 9), "[MMMM] M D, YYYY [at] h:mma" + ) + == "MMMM 12 10, 2015 at 5:09pm" + ) + + assert ( + self.formatter.format( + datetime(1990, 11, 25), + "[It happened on] MMMM Do [in the year] YYYY [a long time ago]", + ) + == "It happened on November 25th in the year 1990 a long time ago" + ) + + assert ( + self.formatter.format( + datetime(1990, 11, 25), + "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]", + ) + == "It happened on November 25th in the year 1990 a long time ago" + ) + + assert ( + self.formatter.format( + datetime(1, 1, 1), "[I'm][ entirely][ escaped,][ weee!]" + ) + == "I'm entirely escaped, weee!" 
+ ) + + # Special RegEx characters + assert ( + self.formatter.format( + datetime(2017, 12, 31, 2, 0), "MMM DD, YYYY |^${}().*+?<>-& h:mm A" + ) + == "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM" + ) + + # Escaping is atomic: brackets inside brackets are treated literally + assert self.formatter.format(datetime(1, 1, 1), "[[[ ]]") == "[[ ]" + + +@pytest.mark.usefixtures("arrow_formatter", "time_1975_12_25") +class TestFormatterBuiltinFormats: + def test_atom(self): + assert ( + self.formatter.format(self.datetime, FORMAT_ATOM) + == "1975-12-25 14:15:16-05:00" + ) + + def test_cookie(self): + assert ( + self.formatter.format(self.datetime, FORMAT_COOKIE) + == "Thursday, 25-Dec-1975 14:15:16 EST" + ) + + def test_rfc_822(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC822) + == "Thu, 25 Dec 75 14:15:16 -0500" + ) + + def test_rfc_850(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC850) + == "Thursday, 25-Dec-75 14:15:16 EST" + ) + + def test_rfc_1036(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC1036) + == "Thu, 25 Dec 75 14:15:16 -0500" + ) + + def test_rfc_1123(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC1123) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_rfc_2822(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC2822) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_rfc3339(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC3339) + == "1975-12-25 14:15:16-05:00" + ) + + def test_rss(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RSS) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_w3c(self): + assert ( + self.formatter.format(self.datetime, FORMAT_W3C) + == "1975-12-25 14:15:16-05:00" + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py new file mode 100644 index 00000000000..006ccdd5bac 
--- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py @@ -0,0 +1,1352 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import pytest + +from arrow import arrow, locales + + +@pytest.mark.usefixtures("lang_locales") +class TestLocaleValidation: + """Validate locales to ensure that translations are valid and complete""" + + def test_locale_validation(self): + + for _, locale_cls in self.locales.items(): + # 7 days + 1 spacer to allow for 1-indexing of months + assert len(locale_cls.day_names) == 8 + assert locale_cls.day_names[0] == "" + # ensure that all string from index 1 onward are valid (not blank or None) + assert all(locale_cls.day_names[1:]) + + assert len(locale_cls.day_abbreviations) == 8 + assert locale_cls.day_abbreviations[0] == "" + assert all(locale_cls.day_abbreviations[1:]) + + # 12 months + 1 spacer to allow for 1-indexing of months + assert len(locale_cls.month_names) == 13 + assert locale_cls.month_names[0] == "" + assert all(locale_cls.month_names[1:]) + + assert len(locale_cls.month_abbreviations) == 13 + assert locale_cls.month_abbreviations[0] == "" + assert all(locale_cls.month_abbreviations[1:]) + + assert len(locale_cls.names) > 0 + assert locale_cls.past is not None + assert locale_cls.future is not None + + +class TestModule: + def test_get_locale(self, mocker): + mock_locale = mocker.Mock() + mock_locale_cls = mocker.Mock() + mock_locale_cls.return_value = mock_locale + + with pytest.raises(ValueError): + arrow.locales.get_locale("locale_name") + + cls_dict = arrow.locales._locales + mocker.patch.dict(cls_dict, {"locale_name": mock_locale_cls}) + + result = arrow.locales.get_locale("locale_name") + + assert result == mock_locale + + def test_get_locale_by_class_name(self, mocker): + mock_locale_cls = mocker.Mock() + mock_locale_obj = mock_locale_cls.return_value = mocker.Mock() + + globals_fn = mocker.Mock() + globals_fn.return_value = {"NonExistentLocale": 
mock_locale_cls} + + with pytest.raises(ValueError): + arrow.locales.get_locale_by_class_name("NonExistentLocale") + + mocker.patch.object(locales, "globals", globals_fn) + result = arrow.locales.get_locale_by_class_name("NonExistentLocale") + + mock_locale_cls.assert_called_once_with() + assert result == mock_locale_obj + + def test_locales(self): + + assert len(locales._locales) > 0 + + +@pytest.mark.usefixtures("lang_locale") +class TestEnglishLocale: + def test_describe(self): + assert self.locale.describe("now", only_distance=True) == "instantly" + assert self.locale.describe("now", only_distance=False) == "just now" + + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 hours" + assert self.locale._format_timeframe("hour", 0) == "an hour" + + def test_format_relative_now(self): + + result = self.locale._format_relative("just now", "now", 0) + + assert result == "just now" + + def test_format_relative_past(self): + + result = self.locale._format_relative("an hour", "hour", 1) + + assert result == "in an hour" + + def test_format_relative_future(self): + + result = self.locale._format_relative("an hour", "hour", -1) + + assert result == "an hour ago" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(0) == "0th" + assert self.locale.ordinal_number(1) == "1st" + assert self.locale.ordinal_number(2) == "2nd" + assert self.locale.ordinal_number(3) == "3rd" + assert self.locale.ordinal_number(4) == "4th" + assert self.locale.ordinal_number(10) == "10th" + assert self.locale.ordinal_number(11) == "11th" + assert self.locale.ordinal_number(12) == "12th" + assert self.locale.ordinal_number(13) == "13th" + assert self.locale.ordinal_number(14) == "14th" + assert self.locale.ordinal_number(21) == "21st" + assert self.locale.ordinal_number(22) == "22nd" + assert self.locale.ordinal_number(23) == "23rd" + assert self.locale.ordinal_number(24) == "24th" + + assert self.locale.ordinal_number(100) == "100th" + 
assert self.locale.ordinal_number(101) == "101st" + assert self.locale.ordinal_number(102) == "102nd" + assert self.locale.ordinal_number(103) == "103rd" + assert self.locale.ordinal_number(104) == "104th" + assert self.locale.ordinal_number(110) == "110th" + assert self.locale.ordinal_number(111) == "111th" + assert self.locale.ordinal_number(112) == "112th" + assert self.locale.ordinal_number(113) == "113th" + assert self.locale.ordinal_number(114) == "114th" + assert self.locale.ordinal_number(121) == "121st" + assert self.locale.ordinal_number(122) == "122nd" + assert self.locale.ordinal_number(123) == "123rd" + assert self.locale.ordinal_number(124) == "124th" + + def test_meridian_invalid_token(self): + assert self.locale.meridian(7, None) is None + assert self.locale.meridian(7, "B") is None + assert self.locale.meridian(7, "NONSENSE") is None + + +@pytest.mark.usefixtures("lang_locale") +class TestItalianLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1º" + + +@pytest.mark.usefixtures("lang_locale") +class TestSpanishLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1º" + + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "ahora" + assert self.locale._format_timeframe("seconds", 1) == "1 segundos" + assert self.locale._format_timeframe("seconds", 3) == "3 segundos" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "un minuto" + assert self.locale._format_timeframe("minutes", 4) == "4 minutos" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "una hora" + assert self.locale._format_timeframe("hours", 5) == "5 horas" + assert self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "un día" + assert self.locale._format_timeframe("days", 6) == "6 días" + 
assert self.locale._format_timeframe("days", 12) == "12 días" + assert self.locale._format_timeframe("week", 1) == "una semana" + assert self.locale._format_timeframe("weeks", 2) == "2 semanas" + assert self.locale._format_timeframe("weeks", 3) == "3 semanas" + assert self.locale._format_timeframe("month", 1) == "un mes" + assert self.locale._format_timeframe("months", 7) == "7 meses" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "un año" + assert self.locale._format_timeframe("years", 8) == "8 años" + assert self.locale._format_timeframe("years", 12) == "12 años" + + assert self.locale._format_timeframe("now", 0) == "ahora" + assert self.locale._format_timeframe("seconds", -1) == "1 segundos" + assert self.locale._format_timeframe("seconds", -9) == "9 segundos" + assert self.locale._format_timeframe("seconds", -12) == "12 segundos" + assert self.locale._format_timeframe("minute", -1) == "un minuto" + assert self.locale._format_timeframe("minutes", -2) == "2 minutos" + assert self.locale._format_timeframe("minutes", -10) == "10 minutos" + assert self.locale._format_timeframe("hour", -1) == "una hora" + assert self.locale._format_timeframe("hours", -3) == "3 horas" + assert self.locale._format_timeframe("hours", -11) == "11 horas" + assert self.locale._format_timeframe("day", -1) == "un día" + assert self.locale._format_timeframe("days", -2) == "2 días" + assert self.locale._format_timeframe("days", -12) == "12 días" + assert self.locale._format_timeframe("week", -1) == "una semana" + assert self.locale._format_timeframe("weeks", -2) == "2 semanas" + assert self.locale._format_timeframe("weeks", -3) == "3 semanas" + assert self.locale._format_timeframe("month", -1) == "un mes" + assert self.locale._format_timeframe("months", -3) == "3 meses" + assert self.locale._format_timeframe("months", -13) == "13 meses" + assert self.locale._format_timeframe("year", -1) == "un año" + assert 
self.locale._format_timeframe("years", -4) == "4 años" + assert self.locale._format_timeframe("years", -14) == "14 años" + + +@pytest.mark.usefixtures("lang_locale") +class TestFrenchLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1er" + assert self.locale.ordinal_number(2) == "2e" + + def test_month_abbreviation(self): + assert "juil" in self.locale.month_abbreviations + + +@pytest.mark.usefixtures("lang_locale") +class TestFrenchCanadianLocale: + def test_month_abbreviation(self): + assert "juill" in self.locale.month_abbreviations + + +@pytest.mark.usefixtures("lang_locale") +class TestRussianLocale: + def test_plurals2(self): + assert self.locale._format_timeframe("hours", 0) == "0 часов" + assert self.locale._format_timeframe("hours", 1) == "1 час" + assert self.locale._format_timeframe("hours", 2) == "2 часа" + assert self.locale._format_timeframe("hours", 4) == "4 часа" + assert self.locale._format_timeframe("hours", 5) == "5 часов" + assert self.locale._format_timeframe("hours", 21) == "21 час" + assert self.locale._format_timeframe("hours", 22) == "22 часа" + assert self.locale._format_timeframe("hours", 25) == "25 часов" + + # feminine grammatical gender should be tested separately + assert self.locale._format_timeframe("minutes", 0) == "0 минут" + assert self.locale._format_timeframe("minutes", 1) == "1 минуту" + assert self.locale._format_timeframe("minutes", 2) == "2 минуты" + assert self.locale._format_timeframe("minutes", 4) == "4 минуты" + assert self.locale._format_timeframe("minutes", 5) == "5 минут" + assert self.locale._format_timeframe("minutes", 21) == "21 минуту" + assert self.locale._format_timeframe("minutes", 22) == "22 минуты" + assert self.locale._format_timeframe("minutes", 25) == "25 минут" + + +@pytest.mark.usefixtures("lang_locale") +class TestPolishLocale: + def test_plurals(self): + + assert self.locale._format_timeframe("seconds", 0) == "0 sekund" + assert self.locale._format_timeframe("second", 
1) == "sekundę" + assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" + assert self.locale._format_timeframe("seconds", 5) == "5 sekund" + assert self.locale._format_timeframe("seconds", 21) == "21 sekund" + assert self.locale._format_timeframe("seconds", 22) == "22 sekundy" + assert self.locale._format_timeframe("seconds", 25) == "25 sekund" + + assert self.locale._format_timeframe("minutes", 0) == "0 minut" + assert self.locale._format_timeframe("minute", 1) == "minutę" + assert self.locale._format_timeframe("minutes", 2) == "2 minuty" + assert self.locale._format_timeframe("minutes", 5) == "5 minut" + assert self.locale._format_timeframe("minutes", 21) == "21 minut" + assert self.locale._format_timeframe("minutes", 22) == "22 minuty" + assert self.locale._format_timeframe("minutes", 25) == "25 minut" + + assert self.locale._format_timeframe("hours", 0) == "0 godzin" + assert self.locale._format_timeframe("hour", 1) == "godzinę" + assert self.locale._format_timeframe("hours", 2) == "2 godziny" + assert self.locale._format_timeframe("hours", 5) == "5 godzin" + assert self.locale._format_timeframe("hours", 21) == "21 godzin" + assert self.locale._format_timeframe("hours", 22) == "22 godziny" + assert self.locale._format_timeframe("hours", 25) == "25 godzin" + + assert self.locale._format_timeframe("weeks", 0) == "0 tygodni" + assert self.locale._format_timeframe("week", 1) == "tydzień" + assert self.locale._format_timeframe("weeks", 2) == "2 tygodnie" + assert self.locale._format_timeframe("weeks", 5) == "5 tygodni" + assert self.locale._format_timeframe("weeks", 21) == "21 tygodni" + assert self.locale._format_timeframe("weeks", 22) == "22 tygodnie" + assert self.locale._format_timeframe("weeks", 25) == "25 tygodni" + + assert self.locale._format_timeframe("months", 0) == "0 miesięcy" + assert self.locale._format_timeframe("month", 1) == "miesiąc" + assert self.locale._format_timeframe("months", 2) == "2 miesiące" + assert 
self.locale._format_timeframe("months", 5) == "5 miesięcy" + assert self.locale._format_timeframe("months", 21) == "21 miesięcy" + assert self.locale._format_timeframe("months", 22) == "22 miesiące" + assert self.locale._format_timeframe("months", 25) == "25 miesięcy" + + assert self.locale._format_timeframe("years", 0) == "0 lat" + assert self.locale._format_timeframe("year", 1) == "rok" + assert self.locale._format_timeframe("years", 2) == "2 lata" + assert self.locale._format_timeframe("years", 5) == "5 lat" + assert self.locale._format_timeframe("years", 21) == "21 lat" + assert self.locale._format_timeframe("years", 22) == "22 lata" + assert self.locale._format_timeframe("years", 25) == "25 lat" + + +@pytest.mark.usefixtures("lang_locale") +class TestIcelandicLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("minute", -1) == "einni mínútu" + assert self.locale._format_timeframe("minute", 1) == "eina mínútu" + + assert self.locale._format_timeframe("hours", -2) == "2 tímum" + assert self.locale._format_timeframe("hours", 2) == "2 tíma" + assert self.locale._format_timeframe("now", 0) == "rétt í þessu" + + +@pytest.mark.usefixtures("lang_locale") +class TestMalayalamLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 മണിക്കൂർ" + assert self.locale._format_timeframe("hour", 0) == "ഒരു മണിക്കൂർ" + + def test_format_relative_now(self): + + result = self.locale._format_relative("ഇപ്പോൾ", "now", 0) + + assert result == "ഇപ്പോൾ" + + def test_format_relative_past(self): + + result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", 1) + assert result == "ഒരു മണിക്കൂർ ശേഷം" + + def test_format_relative_future(self): + + result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", -1) + assert result == "ഒരു മണിക്കൂർ മുമ്പ്" + + +@pytest.mark.usefixtures("lang_locale") +class TestHindiLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 
घंटे" + assert self.locale._format_timeframe("hour", 0) == "एक घंटा" + + def test_format_relative_now(self): + + result = self.locale._format_relative("अभी", "now", 0) + assert result == "अभी" + + def test_format_relative_past(self): + + result = self.locale._format_relative("एक घंटा", "hour", 1) + assert result == "एक घंटा बाद" + + def test_format_relative_future(self): + + result = self.locale._format_relative("एक घंटा", "hour", -1) + assert result == "एक घंटा पहले" + + +@pytest.mark.usefixtures("lang_locale") +class TestCzechLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 hodiny" + assert self.locale._format_timeframe("hours", 5) == "5 hodin" + assert self.locale._format_timeframe("hour", 0) == "0 hodin" + assert self.locale._format_timeframe("hours", -2) == "2 hodinami" + assert self.locale._format_timeframe("hours", -5) == "5 hodinami" + assert self.locale._format_timeframe("now", 0) == "Teď" + + assert self.locale._format_timeframe("weeks", 2) == "2 týdny" + assert self.locale._format_timeframe("weeks", 5) == "5 týdnů" + assert self.locale._format_timeframe("week", 0) == "0 týdnů" + assert self.locale._format_timeframe("weeks", -2) == "2 týdny" + assert self.locale._format_timeframe("weeks", -5) == "5 týdny" + + def test_format_relative_now(self): + + result = self.locale._format_relative("Teď", "now", 0) + assert result == "Teď" + + def test_format_relative_future(self): + + result = self.locale._format_relative("hodinu", "hour", 1) + assert result == "Za hodinu" + + def test_format_relative_past(self): + + result = self.locale._format_relative("hodinou", "hour", -1) + assert result == "Před hodinou" + + +@pytest.mark.usefixtures("lang_locale") +class TestSlovakLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("seconds", -5) == "5 sekundami" + assert self.locale._format_timeframe("seconds", -2) == "2 sekundami" + assert self.locale._format_timeframe("second", -1) == 
"sekundou" + assert self.locale._format_timeframe("second", 0) == "0 sekúnd" + assert self.locale._format_timeframe("second", 1) == "sekundu" + assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" + assert self.locale._format_timeframe("seconds", 5) == "5 sekúnd" + + assert self.locale._format_timeframe("minutes", -5) == "5 minútami" + assert self.locale._format_timeframe("minutes", -2) == "2 minútami" + assert self.locale._format_timeframe("minute", -1) == "minútou" + assert self.locale._format_timeframe("minute", 0) == "0 minút" + assert self.locale._format_timeframe("minute", 1) == "minútu" + assert self.locale._format_timeframe("minutes", 2) == "2 minúty" + assert self.locale._format_timeframe("minutes", 5) == "5 minút" + + assert self.locale._format_timeframe("hours", -5) == "5 hodinami" + assert self.locale._format_timeframe("hours", -2) == "2 hodinami" + assert self.locale._format_timeframe("hour", -1) == "hodinou" + assert self.locale._format_timeframe("hour", 0) == "0 hodín" + assert self.locale._format_timeframe("hour", 1) == "hodinu" + assert self.locale._format_timeframe("hours", 2) == "2 hodiny" + assert self.locale._format_timeframe("hours", 5) == "5 hodín" + + assert self.locale._format_timeframe("days", -5) == "5 dňami" + assert self.locale._format_timeframe("days", -2) == "2 dňami" + assert self.locale._format_timeframe("day", -1) == "dňom" + assert self.locale._format_timeframe("day", 0) == "0 dní" + assert self.locale._format_timeframe("day", 1) == "deň" + assert self.locale._format_timeframe("days", 2) == "2 dni" + assert self.locale._format_timeframe("days", 5) == "5 dní" + + assert self.locale._format_timeframe("weeks", -5) == "5 týždňami" + assert self.locale._format_timeframe("weeks", -2) == "2 týždňami" + assert self.locale._format_timeframe("week", -1) == "týždňom" + assert self.locale._format_timeframe("week", 0) == "0 týždňov" + assert self.locale._format_timeframe("week", 1) == "týždeň" + assert 
self.locale._format_timeframe("weeks", 2) == "2 týždne" + assert self.locale._format_timeframe("weeks", 5) == "5 týždňov" + + assert self.locale._format_timeframe("months", -5) == "5 mesiacmi" + assert self.locale._format_timeframe("months", -2) == "2 mesiacmi" + assert self.locale._format_timeframe("month", -1) == "mesiacom" + assert self.locale._format_timeframe("month", 0) == "0 mesiacov" + assert self.locale._format_timeframe("month", 1) == "mesiac" + assert self.locale._format_timeframe("months", 2) == "2 mesiace" + assert self.locale._format_timeframe("months", 5) == "5 mesiacov" + + assert self.locale._format_timeframe("years", -5) == "5 rokmi" + assert self.locale._format_timeframe("years", -2) == "2 rokmi" + assert self.locale._format_timeframe("year", -1) == "rokom" + assert self.locale._format_timeframe("year", 0) == "0 rokov" + assert self.locale._format_timeframe("year", 1) == "rok" + assert self.locale._format_timeframe("years", 2) == "2 roky" + assert self.locale._format_timeframe("years", 5) == "5 rokov" + + assert self.locale._format_timeframe("now", 0) == "Teraz" + + def test_format_relative_now(self): + + result = self.locale._format_relative("Teraz", "now", 0) + assert result == "Teraz" + + def test_format_relative_future(self): + + result = self.locale._format_relative("hodinu", "hour", 1) + assert result == "O hodinu" + + def test_format_relative_past(self): + + result = self.locale._format_relative("hodinou", "hour", -1) + assert result == "Pred hodinou" + + +@pytest.mark.usefixtures("lang_locale") +class TestBulgarianLocale: + def test_plurals2(self): + assert self.locale._format_timeframe("hours", 0) == "0 часа" + assert self.locale._format_timeframe("hours", 1) == "1 час" + assert self.locale._format_timeframe("hours", 2) == "2 часа" + assert self.locale._format_timeframe("hours", 4) == "4 часа" + assert self.locale._format_timeframe("hours", 5) == "5 часа" + assert self.locale._format_timeframe("hours", 21) == "21 час" + assert 
self.locale._format_timeframe("hours", 22) == "22 часа" + assert self.locale._format_timeframe("hours", 25) == "25 часа" + + # feminine grammatical gender should be tested separately + assert self.locale._format_timeframe("minutes", 0) == "0 минути" + assert self.locale._format_timeframe("minutes", 1) == "1 минута" + assert self.locale._format_timeframe("minutes", 2) == "2 минути" + assert self.locale._format_timeframe("minutes", 4) == "4 минути" + assert self.locale._format_timeframe("minutes", 5) == "5 минути" + assert self.locale._format_timeframe("minutes", 21) == "21 минута" + assert self.locale._format_timeframe("minutes", 22) == "22 минути" + assert self.locale._format_timeframe("minutes", 25) == "25 минути" + + +@pytest.mark.usefixtures("lang_locale") +class TestMacedonianLocale: + def test_singles_mk(self): + assert self.locale._format_timeframe("second", 1) == "една секунда" + assert self.locale._format_timeframe("minute", 1) == "една минута" + assert self.locale._format_timeframe("hour", 1) == "еден саат" + assert self.locale._format_timeframe("day", 1) == "еден ден" + assert self.locale._format_timeframe("week", 1) == "една недела" + assert self.locale._format_timeframe("month", 1) == "еден месец" + assert self.locale._format_timeframe("year", 1) == "една година" + + def test_meridians_mk(self): + assert self.locale.meridian(7, "A") == "претпладне" + assert self.locale.meridian(18, "A") == "попладне" + assert self.locale.meridian(10, "a") == "дп" + assert self.locale.meridian(22, "a") == "пп" + + def test_describe_mk(self): + assert self.locale.describe("second", only_distance=True) == "една секунда" + assert self.locale.describe("second", only_distance=False) == "за една секунда" + assert self.locale.describe("minute", only_distance=True) == "една минута" + assert self.locale.describe("minute", only_distance=False) == "за една минута" + assert self.locale.describe("hour", only_distance=True) == "еден саат" + assert self.locale.describe("hour", 
only_distance=False) == "за еден саат" + assert self.locale.describe("day", only_distance=True) == "еден ден" + assert self.locale.describe("day", only_distance=False) == "за еден ден" + assert self.locale.describe("week", only_distance=True) == "една недела" + assert self.locale.describe("week", only_distance=False) == "за една недела" + assert self.locale.describe("month", only_distance=True) == "еден месец" + assert self.locale.describe("month", only_distance=False) == "за еден месец" + assert self.locale.describe("year", only_distance=True) == "една година" + assert self.locale.describe("year", only_distance=False) == "за една година" + + def test_relative_mk(self): + # time + assert self.locale._format_relative("сега", "now", 0) == "сега" + assert self.locale._format_relative("1 секунда", "seconds", 1) == "за 1 секунда" + assert self.locale._format_relative("1 минута", "minutes", 1) == "за 1 минута" + assert self.locale._format_relative("1 саат", "hours", 1) == "за 1 саат" + assert self.locale._format_relative("1 ден", "days", 1) == "за 1 ден" + assert self.locale._format_relative("1 недела", "weeks", 1) == "за 1 недела" + assert self.locale._format_relative("1 месец", "months", 1) == "за 1 месец" + assert self.locale._format_relative("1 година", "years", 1) == "за 1 година" + assert ( + self.locale._format_relative("1 секунда", "seconds", -1) == "пред 1 секунда" + ) + assert ( + self.locale._format_relative("1 минута", "minutes", -1) == "пред 1 минута" + ) + assert self.locale._format_relative("1 саат", "hours", -1) == "пред 1 саат" + assert self.locale._format_relative("1 ден", "days", -1) == "пред 1 ден" + assert self.locale._format_relative("1 недела", "weeks", -1) == "пред 1 недела" + assert self.locale._format_relative("1 месец", "months", -1) == "пред 1 месец" + assert self.locale._format_relative("1 година", "years", -1) == "пред 1 година" + + def test_plurals_mk(self): + # Seconds + assert self.locale._format_timeframe("seconds", 0) == "0 секунди" + 
assert self.locale._format_timeframe("seconds", 1) == "1 секунда" + assert self.locale._format_timeframe("seconds", 2) == "2 секунди" + assert self.locale._format_timeframe("seconds", 4) == "4 секунди" + assert self.locale._format_timeframe("seconds", 5) == "5 секунди" + assert self.locale._format_timeframe("seconds", 21) == "21 секунда" + assert self.locale._format_timeframe("seconds", 22) == "22 секунди" + assert self.locale._format_timeframe("seconds", 25) == "25 секунди" + + # Minutes + assert self.locale._format_timeframe("minutes", 0) == "0 минути" + assert self.locale._format_timeframe("minutes", 1) == "1 минута" + assert self.locale._format_timeframe("minutes", 2) == "2 минути" + assert self.locale._format_timeframe("minutes", 4) == "4 минути" + assert self.locale._format_timeframe("minutes", 5) == "5 минути" + assert self.locale._format_timeframe("minutes", 21) == "21 минута" + assert self.locale._format_timeframe("minutes", 22) == "22 минути" + assert self.locale._format_timeframe("minutes", 25) == "25 минути" + + # Hours + assert self.locale._format_timeframe("hours", 0) == "0 саати" + assert self.locale._format_timeframe("hours", 1) == "1 саат" + assert self.locale._format_timeframe("hours", 2) == "2 саати" + assert self.locale._format_timeframe("hours", 4) == "4 саати" + assert self.locale._format_timeframe("hours", 5) == "5 саати" + assert self.locale._format_timeframe("hours", 21) == "21 саат" + assert self.locale._format_timeframe("hours", 22) == "22 саати" + assert self.locale._format_timeframe("hours", 25) == "25 саати" + + # Days + assert self.locale._format_timeframe("days", 0) == "0 дена" + assert self.locale._format_timeframe("days", 1) == "1 ден" + assert self.locale._format_timeframe("days", 2) == "2 дена" + assert self.locale._format_timeframe("days", 3) == "3 дена" + assert self.locale._format_timeframe("days", 21) == "21 ден" + + # Weeks + assert self.locale._format_timeframe("weeks", 0) == "0 недели" + assert 
self.locale._format_timeframe("weeks", 1) == "1 недела" + assert self.locale._format_timeframe("weeks", 2) == "2 недели" + assert self.locale._format_timeframe("weeks", 4) == "4 недели" + assert self.locale._format_timeframe("weeks", 5) == "5 недели" + assert self.locale._format_timeframe("weeks", 21) == "21 недела" + assert self.locale._format_timeframe("weeks", 22) == "22 недели" + assert self.locale._format_timeframe("weeks", 25) == "25 недели" + + # Months + assert self.locale._format_timeframe("months", 0) == "0 месеци" + assert self.locale._format_timeframe("months", 1) == "1 месец" + assert self.locale._format_timeframe("months", 2) == "2 месеци" + assert self.locale._format_timeframe("months", 4) == "4 месеци" + assert self.locale._format_timeframe("months", 5) == "5 месеци" + assert self.locale._format_timeframe("months", 21) == "21 месец" + assert self.locale._format_timeframe("months", 22) == "22 месеци" + assert self.locale._format_timeframe("months", 25) == "25 месеци" + + # Years + assert self.locale._format_timeframe("years", 1) == "1 година" + assert self.locale._format_timeframe("years", 2) == "2 години" + assert self.locale._format_timeframe("years", 5) == "5 години" + + def test_multi_describe_mk(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "за 5 години 1 недела 1 саат 6 минути" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "за 0 дена 1 саат 6 минути" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "за 1 саат 6 минути" + assert describe(seconds4000, only_distance=True) == "1 саат 6 минути" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert describe(seconds3700) == "за 1 саат 1 минута" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "за 0 саати 5 минути" + seconds300 = [("minutes", 5)] + assert 
describe(seconds300) == "за 5 минути" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "за 1 минута" + assert describe(seconds60, only_distance=True) == "1 минута" + seconds60 = [("seconds", 1)] + assert describe(seconds60) == "за 1 секунда" + assert describe(seconds60, only_distance=True) == "1 секунда" + + +@pytest.mark.usefixtures("time_2013_01_01") +@pytest.mark.usefixtures("lang_locale") +class TestHebrewLocale: + def test_couple_of_timeframe(self): + assert self.locale._format_timeframe("days", 1) == "יום" + assert self.locale._format_timeframe("days", 2) == "יומיים" + assert self.locale._format_timeframe("days", 3) == "3 ימים" + + assert self.locale._format_timeframe("hours", 1) == "שעה" + assert self.locale._format_timeframe("hours", 2) == "שעתיים" + assert self.locale._format_timeframe("hours", 3) == "3 שעות" + + assert self.locale._format_timeframe("week", 1) == "שבוע" + assert self.locale._format_timeframe("weeks", 2) == "שבועיים" + assert self.locale._format_timeframe("weeks", 3) == "3 שבועות" + + assert self.locale._format_timeframe("months", 1) == "חודש" + assert self.locale._format_timeframe("months", 2) == "חודשיים" + assert self.locale._format_timeframe("months", 4) == "4 חודשים" + + assert self.locale._format_timeframe("years", 1) == "שנה" + assert self.locale._format_timeframe("years", 2) == "שנתיים" + assert self.locale._format_timeframe("years", 5) == "5 שנים" + + def test_describe_multi(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "בעוד 5 שנים, שבוע, שעה ו־6 דקות" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "בעוד 0 ימים, שעה ו־6 דקות" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "בעוד שעה ו־6 דקות" + assert describe(seconds4000, only_distance=True) == "שעה ו־6 דקות" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert 
describe(seconds3700) == "בעוד שעה ודקה" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "בעוד 0 שעות ו־5 דקות" + seconds300 = [("minutes", 5)] + assert describe(seconds300) == "בעוד 5 דקות" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "בעוד דקה" + assert describe(seconds60, only_distance=True) == "דקה" + + +@pytest.mark.usefixtures("lang_locale") +class TestMarathiLocale: + def test_dateCoreFunctionality(self): + dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) + assert self.locale.month_name(dt.month) == "एप्रिल" + assert self.locale.month_abbreviation(dt.month) == "एप्रि" + assert self.locale.day_name(dt.isoweekday()) == "शनिवार" + assert self.locale.day_abbreviation(dt.isoweekday()) == "शनि" + + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == "2 तास" + assert self.locale._format_timeframe("hour", 0) == "एक तास" + + def test_format_relative_now(self): + result = self.locale._format_relative("सद्य", "now", 0) + assert result == "सद्य" + + def test_format_relative_past(self): + result = self.locale._format_relative("एक तास", "hour", 1) + assert result == "एक तास नंतर" + + def test_format_relative_future(self): + result = self.locale._format_relative("एक तास", "hour", -1) + assert result == "एक तास आधी" + + # Not currently implemented + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1" + + +@pytest.mark.usefixtures("lang_locale") +class TestFinnishLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == ("2 tuntia", "2 tunnin") + assert self.locale._format_timeframe("hour", 0) == ("tunti", "tunnin") + + def test_format_relative_now(self): + result = self.locale._format_relative(["juuri nyt", "juuri nyt"], "now", 0) + assert result == "juuri nyt" + + def test_format_relative_past(self): + result = self.locale._format_relative(["tunti", "tunnin"], "hour", 1) + assert result == "tunnin kuluttua" + + def 
test_format_relative_future(self): + result = self.locale._format_relative(["tunti", "tunnin"], "hour", -1) + assert result == "tunti sitten" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1." + + +@pytest.mark.usefixtures("lang_locale") +class TestGermanLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1." + + def test_define(self): + assert self.locale.describe("minute", only_distance=True) == "eine Minute" + assert self.locale.describe("minute", only_distance=False) == "in einer Minute" + assert self.locale.describe("hour", only_distance=True) == "eine Stunde" + assert self.locale.describe("hour", only_distance=False) == "in einer Stunde" + assert self.locale.describe("day", only_distance=True) == "ein Tag" + assert self.locale.describe("day", only_distance=False) == "in einem Tag" + assert self.locale.describe("week", only_distance=True) == "eine Woche" + assert self.locale.describe("week", only_distance=False) == "in einer Woche" + assert self.locale.describe("month", only_distance=True) == "ein Monat" + assert self.locale.describe("month", only_distance=False) == "in einem Monat" + assert self.locale.describe("year", only_distance=True) == "ein Jahr" + assert self.locale.describe("year", only_distance=False) == "in einem Jahr" + + def test_weekday(self): + dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) + assert self.locale.day_name(dt.isoweekday()) == "Samstag" + assert self.locale.day_abbreviation(dt.isoweekday()) == "Sa" + + +@pytest.mark.usefixtures("lang_locale") +class TestHungarianLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == "2 óra" + assert self.locale._format_timeframe("hour", 0) == "egy órával" + assert self.locale._format_timeframe("hours", -2) == "2 órával" + assert self.locale._format_timeframe("now", 0) == "éppen most" + + +@pytest.mark.usefixtures("lang_locale") +class TestEsperantoLocale: + def test_format_timeframe(self): + assert 
self.locale._format_timeframe("hours", 2) == "2 horoj" + assert self.locale._format_timeframe("hour", 0) == "un horo" + assert self.locale._format_timeframe("hours", -2) == "2 horoj" + assert self.locale._format_timeframe("now", 0) == "nun" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1a" + + +@pytest.mark.usefixtures("lang_locale") +class TestThaiLocale: + def test_year_full(self): + assert self.locale.year_full(2015) == "2558" + + def test_year_abbreviation(self): + assert self.locale.year_abbreviation(2015) == "58" + + def test_format_relative_now(self): + result = self.locale._format_relative("ขณะนี้", "now", 0) + assert result == "ขณะนี้" + + def test_format_relative_past(self): + result = self.locale._format_relative("1 ชั่วโมง", "hour", 1) + assert result == "ในอีก 1 ชั่วโมง" + result = self.locale._format_relative("{0} ชั่วโมง", "hours", 2) + assert result == "ในอีก {0} ชั่วโมง" + result = self.locale._format_relative("ไม่กี่วินาที", "seconds", 42) + assert result == "ในอีกไม่กี่วินาที" + + def test_format_relative_future(self): + result = self.locale._format_relative("1 ชั่วโมง", "hour", -1) + assert result == "1 ชั่วโมง ที่ผ่านมา" + + +@pytest.mark.usefixtures("lang_locale") +class TestBengaliLocale: + def test_ordinal_number(self): + assert self.locale._ordinal_number(0) == "0তম" + assert self.locale._ordinal_number(1) == "1ম" + assert self.locale._ordinal_number(3) == "3য়" + assert self.locale._ordinal_number(4) == "4র্থ" + assert self.locale._ordinal_number(5) == "5ম" + assert self.locale._ordinal_number(6) == "6ষ্ঠ" + assert self.locale._ordinal_number(10) == "10ম" + assert self.locale._ordinal_number(11) == "11তম" + assert self.locale._ordinal_number(42) == "42তম" + assert self.locale._ordinal_number(-1) is None + + +@pytest.mark.usefixtures("lang_locale") +class TestRomanianLocale: + def test_timeframes(self): + + assert self.locale._format_timeframe("hours", 2) == "2 ore" + assert 
self.locale._format_timeframe("months", 2) == "2 luni" + + assert self.locale._format_timeframe("days", 2) == "2 zile" + assert self.locale._format_timeframe("years", 2) == "2 ani" + + assert self.locale._format_timeframe("hours", 3) == "3 ore" + assert self.locale._format_timeframe("months", 4) == "4 luni" + assert self.locale._format_timeframe("days", 3) == "3 zile" + assert self.locale._format_timeframe("years", 5) == "5 ani" + + def test_relative_timeframes(self): + assert self.locale._format_relative("acum", "now", 0) == "acum" + assert self.locale._format_relative("o oră", "hour", 1) == "peste o oră" + assert self.locale._format_relative("o oră", "hour", -1) == "o oră în urmă" + assert self.locale._format_relative("un minut", "minute", 1) == "peste un minut" + assert ( + self.locale._format_relative("un minut", "minute", -1) == "un minut în urmă" + ) + assert ( + self.locale._format_relative("câteva secunde", "seconds", -1) + == "câteva secunde în urmă" + ) + assert ( + self.locale._format_relative("câteva secunde", "seconds", 1) + == "peste câteva secunde" + ) + assert self.locale._format_relative("o zi", "day", -1) == "o zi în urmă" + assert self.locale._format_relative("o zi", "day", 1) == "peste o zi" + + +@pytest.mark.usefixtures("lang_locale") +class TestArabicLocale: + def test_timeframes(self): + + # single + assert self.locale._format_timeframe("minute", 1) == "دقيقة" + assert self.locale._format_timeframe("hour", 1) == "ساعة" + assert self.locale._format_timeframe("day", 1) == "يوم" + assert self.locale._format_timeframe("month", 1) == "شهر" + assert self.locale._format_timeframe("year", 1) == "سنة" + + # double + assert self.locale._format_timeframe("minutes", 2) == "دقيقتين" + assert self.locale._format_timeframe("hours", 2) == "ساعتين" + assert self.locale._format_timeframe("days", 2) == "يومين" + assert self.locale._format_timeframe("months", 2) == "شهرين" + assert self.locale._format_timeframe("years", 2) == "سنتين" + + # up to ten + assert 
self.locale._format_timeframe("minutes", 3) == "3 دقائق" + assert self.locale._format_timeframe("hours", 4) == "4 ساعات" + assert self.locale._format_timeframe("days", 5) == "5 أيام" + assert self.locale._format_timeframe("months", 6) == "6 أشهر" + assert self.locale._format_timeframe("years", 10) == "10 سنوات" + + # more than ten + assert self.locale._format_timeframe("minutes", 11) == "11 دقيقة" + assert self.locale._format_timeframe("hours", 19) == "19 ساعة" + assert self.locale._format_timeframe("months", 24) == "24 شهر" + assert self.locale._format_timeframe("days", 50) == "50 يوم" + assert self.locale._format_timeframe("years", 115) == "115 سنة" + + +@pytest.mark.usefixtures("lang_locale") +class TestNepaliLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 3) == "3 घण्टा" + assert self.locale._format_timeframe("hour", 0) == "एक घण्टा" + + def test_format_relative_now(self): + result = self.locale._format_relative("अहिले", "now", 0) + assert result == "अहिले" + + def test_format_relative_future(self): + result = self.locale._format_relative("एक घण्टा", "hour", 1) + assert result == "एक घण्टा पछी" + + def test_format_relative_past(self): + result = self.locale._format_relative("एक घण्टा", "hour", -1) + assert result == "एक घण्टा पहिले" + + +@pytest.mark.usefixtures("lang_locale") +class TestIndonesianLocale: + def test_timeframes(self): + assert self.locale._format_timeframe("hours", 2) == "2 jam" + assert self.locale._format_timeframe("months", 2) == "2 bulan" + + assert self.locale._format_timeframe("days", 2) == "2 hari" + assert self.locale._format_timeframe("years", 2) == "2 tahun" + + assert self.locale._format_timeframe("hours", 3) == "3 jam" + assert self.locale._format_timeframe("months", 4) == "4 bulan" + assert self.locale._format_timeframe("days", 3) == "3 hari" + assert self.locale._format_timeframe("years", 5) == "5 tahun" + + def test_format_relative_now(self): + assert self.locale._format_relative("baru 
saja", "now", 0) == "baru saja" + + def test_format_relative_past(self): + assert self.locale._format_relative("1 jam", "hour", 1) == "dalam 1 jam" + assert self.locale._format_relative("1 detik", "seconds", 1) == "dalam 1 detik" + + def test_format_relative_future(self): + assert self.locale._format_relative("1 jam", "hour", -1) == "1 jam yang lalu" + + +@pytest.mark.usefixtures("lang_locale") +class TestTagalogLocale: + def test_singles_tl(self): + assert self.locale._format_timeframe("second", 1) == "isang segundo" + assert self.locale._format_timeframe("minute", 1) == "isang minuto" + assert self.locale._format_timeframe("hour", 1) == "isang oras" + assert self.locale._format_timeframe("day", 1) == "isang araw" + assert self.locale._format_timeframe("week", 1) == "isang linggo" + assert self.locale._format_timeframe("month", 1) == "isang buwan" + assert self.locale._format_timeframe("year", 1) == "isang taon" + + def test_meridians_tl(self): + assert self.locale.meridian(7, "A") == "ng umaga" + assert self.locale.meridian(18, "A") == "ng hapon" + assert self.locale.meridian(10, "a") == "nu" + assert self.locale.meridian(22, "a") == "nh" + + def test_describe_tl(self): + assert self.locale.describe("second", only_distance=True) == "isang segundo" + assert ( + self.locale.describe("second", only_distance=False) + == "isang segundo mula ngayon" + ) + assert self.locale.describe("minute", only_distance=True) == "isang minuto" + assert ( + self.locale.describe("minute", only_distance=False) + == "isang minuto mula ngayon" + ) + assert self.locale.describe("hour", only_distance=True) == "isang oras" + assert ( + self.locale.describe("hour", only_distance=False) + == "isang oras mula ngayon" + ) + assert self.locale.describe("day", only_distance=True) == "isang araw" + assert ( + self.locale.describe("day", only_distance=False) == "isang araw mula ngayon" + ) + assert self.locale.describe("week", only_distance=True) == "isang linggo" + assert ( + 
self.locale.describe("week", only_distance=False) + == "isang linggo mula ngayon" + ) + assert self.locale.describe("month", only_distance=True) == "isang buwan" + assert ( + self.locale.describe("month", only_distance=False) + == "isang buwan mula ngayon" + ) + assert self.locale.describe("year", only_distance=True) == "isang taon" + assert ( + self.locale.describe("year", only_distance=False) + == "isang taon mula ngayon" + ) + + def test_relative_tl(self): + # time + assert self.locale._format_relative("ngayon", "now", 0) == "ngayon" + assert ( + self.locale._format_relative("1 segundo", "seconds", 1) + == "1 segundo mula ngayon" + ) + assert ( + self.locale._format_relative("1 minuto", "minutes", 1) + == "1 minuto mula ngayon" + ) + assert ( + self.locale._format_relative("1 oras", "hours", 1) == "1 oras mula ngayon" + ) + assert self.locale._format_relative("1 araw", "days", 1) == "1 araw mula ngayon" + assert ( + self.locale._format_relative("1 linggo", "weeks", 1) + == "1 linggo mula ngayon" + ) + assert ( + self.locale._format_relative("1 buwan", "months", 1) + == "1 buwan mula ngayon" + ) + assert ( + self.locale._format_relative("1 taon", "years", 1) == "1 taon mula ngayon" + ) + assert ( + self.locale._format_relative("1 segundo", "seconds", -1) + == "nakaraang 1 segundo" + ) + assert ( + self.locale._format_relative("1 minuto", "minutes", -1) + == "nakaraang 1 minuto" + ) + assert self.locale._format_relative("1 oras", "hours", -1) == "nakaraang 1 oras" + assert self.locale._format_relative("1 araw", "days", -1) == "nakaraang 1 araw" + assert ( + self.locale._format_relative("1 linggo", "weeks", -1) + == "nakaraang 1 linggo" + ) + assert ( + self.locale._format_relative("1 buwan", "months", -1) == "nakaraang 1 buwan" + ) + assert self.locale._format_relative("1 taon", "years", -1) == "nakaraang 1 taon" + + def test_plurals_tl(self): + # Seconds + assert self.locale._format_timeframe("seconds", 0) == "0 segundo" + assert 
self.locale._format_timeframe("seconds", 1) == "1 segundo" + assert self.locale._format_timeframe("seconds", 2) == "2 segundo" + assert self.locale._format_timeframe("seconds", 4) == "4 segundo" + assert self.locale._format_timeframe("seconds", 5) == "5 segundo" + assert self.locale._format_timeframe("seconds", 21) == "21 segundo" + assert self.locale._format_timeframe("seconds", 22) == "22 segundo" + assert self.locale._format_timeframe("seconds", 25) == "25 segundo" + + # Minutes + assert self.locale._format_timeframe("minutes", 0) == "0 minuto" + assert self.locale._format_timeframe("minutes", 1) == "1 minuto" + assert self.locale._format_timeframe("minutes", 2) == "2 minuto" + assert self.locale._format_timeframe("minutes", 4) == "4 minuto" + assert self.locale._format_timeframe("minutes", 5) == "5 minuto" + assert self.locale._format_timeframe("minutes", 21) == "21 minuto" + assert self.locale._format_timeframe("minutes", 22) == "22 minuto" + assert self.locale._format_timeframe("minutes", 25) == "25 minuto" + + # Hours + assert self.locale._format_timeframe("hours", 0) == "0 oras" + assert self.locale._format_timeframe("hours", 1) == "1 oras" + assert self.locale._format_timeframe("hours", 2) == "2 oras" + assert self.locale._format_timeframe("hours", 4) == "4 oras" + assert self.locale._format_timeframe("hours", 5) == "5 oras" + assert self.locale._format_timeframe("hours", 21) == "21 oras" + assert self.locale._format_timeframe("hours", 22) == "22 oras" + assert self.locale._format_timeframe("hours", 25) == "25 oras" + + # Days + assert self.locale._format_timeframe("days", 0) == "0 araw" + assert self.locale._format_timeframe("days", 1) == "1 araw" + assert self.locale._format_timeframe("days", 2) == "2 araw" + assert self.locale._format_timeframe("days", 3) == "3 araw" + assert self.locale._format_timeframe("days", 21) == "21 araw" + + # Weeks + assert self.locale._format_timeframe("weeks", 0) == "0 linggo" + assert self.locale._format_timeframe("weeks", 
1) == "1 linggo" + assert self.locale._format_timeframe("weeks", 2) == "2 linggo" + assert self.locale._format_timeframe("weeks", 4) == "4 linggo" + assert self.locale._format_timeframe("weeks", 5) == "5 linggo" + assert self.locale._format_timeframe("weeks", 21) == "21 linggo" + assert self.locale._format_timeframe("weeks", 22) == "22 linggo" + assert self.locale._format_timeframe("weeks", 25) == "25 linggo" + + # Months + assert self.locale._format_timeframe("months", 0) == "0 buwan" + assert self.locale._format_timeframe("months", 1) == "1 buwan" + assert self.locale._format_timeframe("months", 2) == "2 buwan" + assert self.locale._format_timeframe("months", 4) == "4 buwan" + assert self.locale._format_timeframe("months", 5) == "5 buwan" + assert self.locale._format_timeframe("months", 21) == "21 buwan" + assert self.locale._format_timeframe("months", 22) == "22 buwan" + assert self.locale._format_timeframe("months", 25) == "25 buwan" + + # Years + assert self.locale._format_timeframe("years", 1) == "1 taon" + assert self.locale._format_timeframe("years", 2) == "2 taon" + assert self.locale._format_timeframe("years", 5) == "5 taon" + + def test_multi_describe_tl(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "5 taon 1 linggo 1 oras 6 minuto mula ngayon" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "0 araw 1 oras 6 minuto mula ngayon" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "1 oras 6 minuto mula ngayon" + assert describe(seconds4000, only_distance=True) == "1 oras 6 minuto" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert describe(seconds3700) == "1 oras 1 minuto mula ngayon" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "0 oras 5 minuto mula ngayon" + seconds300 = [("minutes", 5)] + assert 
describe(seconds300) == "5 minuto mula ngayon" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "1 minuto mula ngayon" + assert describe(seconds60, only_distance=True) == "1 minuto" + seconds60 = [("seconds", 1)] + assert describe(seconds60) == "1 segundo mula ngayon" + assert describe(seconds60, only_distance=True) == "1 segundo" + + def test_ordinal_number_tl(self): + assert self.locale.ordinal_number(0) == "ika-0" + assert self.locale.ordinal_number(1) == "ika-1" + assert self.locale.ordinal_number(2) == "ika-2" + assert self.locale.ordinal_number(3) == "ika-3" + assert self.locale.ordinal_number(10) == "ika-10" + assert self.locale.ordinal_number(23) == "ika-23" + assert self.locale.ordinal_number(100) == "ika-100" + assert self.locale.ordinal_number(103) == "ika-103" + assert self.locale.ordinal_number(114) == "ika-114" + + +@pytest.mark.usefixtures("lang_locale") +class TestEstonianLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "just nüüd" + assert self.locale._format_timeframe("second", 1) == "ühe sekundi" + assert self.locale._format_timeframe("seconds", 3) == "3 sekundi" + assert self.locale._format_timeframe("seconds", 30) == "30 sekundi" + assert self.locale._format_timeframe("minute", 1) == "ühe minuti" + assert self.locale._format_timeframe("minutes", 4) == "4 minuti" + assert self.locale._format_timeframe("minutes", 40) == "40 minuti" + assert self.locale._format_timeframe("hour", 1) == "tunni aja" + assert self.locale._format_timeframe("hours", 5) == "5 tunni" + assert self.locale._format_timeframe("hours", 23) == "23 tunni" + assert self.locale._format_timeframe("day", 1) == "ühe päeva" + assert self.locale._format_timeframe("days", 6) == "6 päeva" + assert self.locale._format_timeframe("days", 12) == "12 päeva" + assert self.locale._format_timeframe("month", 1) == "ühe kuu" + assert self.locale._format_timeframe("months", 7) == "7 kuu" + assert self.locale._format_timeframe("months", 11) 
== "11 kuu" + assert self.locale._format_timeframe("year", 1) == "ühe aasta" + assert self.locale._format_timeframe("years", 8) == "8 aasta" + assert self.locale._format_timeframe("years", 12) == "12 aasta" + + assert self.locale._format_timeframe("now", 0) == "just nüüd" + assert self.locale._format_timeframe("second", -1) == "üks sekund" + assert self.locale._format_timeframe("seconds", -9) == "9 sekundit" + assert self.locale._format_timeframe("seconds", -12) == "12 sekundit" + assert self.locale._format_timeframe("minute", -1) == "üks minut" + assert self.locale._format_timeframe("minutes", -2) == "2 minutit" + assert self.locale._format_timeframe("minutes", -10) == "10 minutit" + assert self.locale._format_timeframe("hour", -1) == "tund aega" + assert self.locale._format_timeframe("hours", -3) == "3 tundi" + assert self.locale._format_timeframe("hours", -11) == "11 tundi" + assert self.locale._format_timeframe("day", -1) == "üks päev" + assert self.locale._format_timeframe("days", -2) == "2 päeva" + assert self.locale._format_timeframe("days", -12) == "12 päeva" + assert self.locale._format_timeframe("month", -1) == "üks kuu" + assert self.locale._format_timeframe("months", -3) == "3 kuud" + assert self.locale._format_timeframe("months", -13) == "13 kuud" + assert self.locale._format_timeframe("year", -1) == "üks aasta" + assert self.locale._format_timeframe("years", -4) == "4 aastat" + assert self.locale._format_timeframe("years", -14) == "14 aastat" + + +@pytest.mark.usefixtures("lang_locale") +class TestPortugueseLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "agora" + assert self.locale._format_timeframe("second", 1) == "um segundo" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "um minuto" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "uma hora" + assert 
self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "um dia" + assert self.locale._format_timeframe("days", 12) == "12 dias" + assert self.locale._format_timeframe("month", 1) == "um mês" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "um ano" + assert self.locale._format_timeframe("years", 12) == "12 anos" + + +@pytest.mark.usefixtures("lang_locale") +class TestBrazilianPortugueseLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "agora" + assert self.locale._format_timeframe("second", 1) == "um segundo" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "um minuto" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "uma hora" + assert self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "um dia" + assert self.locale._format_timeframe("days", 12) == "12 dias" + assert self.locale._format_timeframe("month", 1) == "um mês" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "um ano" + assert self.locale._format_timeframe("years", 12) == "12 anos" + assert self.locale._format_relative("uma hora", "hour", -1) == "faz uma hora" + + +@pytest.mark.usefixtures("lang_locale") +class TestHongKongLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "剛才" + assert self.locale._format_timeframe("second", 1) == "1秒" + assert self.locale._format_timeframe("seconds", 30) == "30秒" + assert self.locale._format_timeframe("minute", 1) == "1分鐘" + assert self.locale._format_timeframe("minutes", 40) == "40分鐘" + assert self.locale._format_timeframe("hour", 1) == "1小時" + assert 
self.locale._format_timeframe("hours", 23) == "23小時" + assert self.locale._format_timeframe("day", 1) == "1天" + assert self.locale._format_timeframe("days", 12) == "12天" + assert self.locale._format_timeframe("week", 1) == "1星期" + assert self.locale._format_timeframe("weeks", 38) == "38星期" + assert self.locale._format_timeframe("month", 1) == "1個月" + assert self.locale._format_timeframe("months", 11) == "11個月" + assert self.locale._format_timeframe("year", 1) == "1年" + assert self.locale._format_timeframe("years", 12) == "12年" + + +@pytest.mark.usefixtures("lang_locale") +class TestChineseTWLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "剛才" + assert self.locale._format_timeframe("second", 1) == "1秒" + assert self.locale._format_timeframe("seconds", 30) == "30秒" + assert self.locale._format_timeframe("minute", 1) == "1分鐘" + assert self.locale._format_timeframe("minutes", 40) == "40分鐘" + assert self.locale._format_timeframe("hour", 1) == "1小時" + assert self.locale._format_timeframe("hours", 23) == "23小時" + assert self.locale._format_timeframe("day", 1) == "1天" + assert self.locale._format_timeframe("days", 12) == "12天" + assert self.locale._format_timeframe("week", 1) == "1週" + assert self.locale._format_timeframe("weeks", 38) == "38週" + assert self.locale._format_timeframe("month", 1) == "1個月" + assert self.locale._format_timeframe("months", 11) == "11個月" + assert self.locale._format_timeframe("year", 1) == "1年" + assert self.locale._format_timeframe("years", 12) == "12年" + + +@pytest.mark.usefixtures("lang_locale") +class TestSwahiliLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "sasa hivi" + assert self.locale._format_timeframe("second", 1) == "sekunde" + assert self.locale._format_timeframe("seconds", 3) == "sekunde 3" + assert self.locale._format_timeframe("seconds", 30) == "sekunde 30" + assert self.locale._format_timeframe("minute", 1) == "dakika moja" + assert 
self.locale._format_timeframe("minutes", 4) == "dakika 4" + assert self.locale._format_timeframe("minutes", 40) == "dakika 40" + assert self.locale._format_timeframe("hour", 1) == "saa moja" + assert self.locale._format_timeframe("hours", 5) == "saa 5" + assert self.locale._format_timeframe("hours", 23) == "saa 23" + assert self.locale._format_timeframe("day", 1) == "siku moja" + assert self.locale._format_timeframe("days", 6) == "siku 6" + assert self.locale._format_timeframe("days", 12) == "siku 12" + assert self.locale._format_timeframe("month", 1) == "mwezi moja" + assert self.locale._format_timeframe("months", 7) == "miezi 7" + assert self.locale._format_timeframe("week", 1) == "wiki moja" + assert self.locale._format_timeframe("weeks", 2) == "wiki 2" + assert self.locale._format_timeframe("months", 11) == "miezi 11" + assert self.locale._format_timeframe("year", 1) == "mwaka moja" + assert self.locale._format_timeframe("years", 8) == "miaka 8" + assert self.locale._format_timeframe("years", 12) == "miaka 12" + + def test_format_relative_now(self): + result = self.locale._format_relative("sasa hivi", "now", 0) + assert result == "sasa hivi" + + def test_format_relative_past(self): + result = self.locale._format_relative("saa moja", "hour", 1) + assert result == "muda wa saa moja" + + def test_format_relative_future(self): + result = self.locale._format_relative("saa moja", "hour", -1) + assert result == "saa moja iliyopita" + + +@pytest.mark.usefixtures("lang_locale") +class TestKoreanLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "지금" + assert self.locale._format_timeframe("second", 1) == "1초" + assert self.locale._format_timeframe("seconds", 2) == "2초" + assert self.locale._format_timeframe("minute", 1) == "1분" + assert self.locale._format_timeframe("minutes", 2) == "2분" + assert self.locale._format_timeframe("hour", 1) == "한시간" + assert self.locale._format_timeframe("hours", 2) == "2시간" + assert 
self.locale._format_timeframe("day", 1) == "하루" + assert self.locale._format_timeframe("days", 2) == "2일" + assert self.locale._format_timeframe("week", 1) == "1주" + assert self.locale._format_timeframe("weeks", 2) == "2주" + assert self.locale._format_timeframe("month", 1) == "한달" + assert self.locale._format_timeframe("months", 2) == "2개월" + assert self.locale._format_timeframe("year", 1) == "1년" + assert self.locale._format_timeframe("years", 2) == "2년" + + def test_format_relative(self): + assert self.locale._format_relative("지금", "now", 0) == "지금" + + assert self.locale._format_relative("1초", "second", 1) == "1초 후" + assert self.locale._format_relative("2초", "seconds", 2) == "2초 후" + assert self.locale._format_relative("1분", "minute", 1) == "1분 후" + assert self.locale._format_relative("2분", "minutes", 2) == "2분 후" + assert self.locale._format_relative("한시간", "hour", 1) == "한시간 후" + assert self.locale._format_relative("2시간", "hours", 2) == "2시간 후" + assert self.locale._format_relative("하루", "day", 1) == "내일" + assert self.locale._format_relative("2일", "days", 2) == "모레" + assert self.locale._format_relative("3일", "days", 3) == "글피" + assert self.locale._format_relative("4일", "days", 4) == "그글피" + assert self.locale._format_relative("5일", "days", 5) == "5일 후" + assert self.locale._format_relative("1주", "week", 1) == "1주 후" + assert self.locale._format_relative("2주", "weeks", 2) == "2주 후" + assert self.locale._format_relative("한달", "month", 1) == "한달 후" + assert self.locale._format_relative("2개월", "months", 2) == "2개월 후" + assert self.locale._format_relative("1년", "year", 1) == "내년" + assert self.locale._format_relative("2년", "years", 2) == "내후년" + assert self.locale._format_relative("3년", "years", 3) == "3년 후" + + assert self.locale._format_relative("1초", "second", -1) == "1초 전" + assert self.locale._format_relative("2초", "seconds", -2) == "2초 전" + assert self.locale._format_relative("1분", "minute", -1) == "1분 전" + assert self.locale._format_relative("2분", 
"minutes", -2) == "2분 전" + assert self.locale._format_relative("한시간", "hour", -1) == "한시간 전" + assert self.locale._format_relative("2시간", "hours", -2) == "2시간 전" + assert self.locale._format_relative("하루", "day", -1) == "어제" + assert self.locale._format_relative("2일", "days", -2) == "그제" + assert self.locale._format_relative("3일", "days", -3) == "그끄제" + assert self.locale._format_relative("4일", "days", -4) == "4일 전" + assert self.locale._format_relative("1주", "week", -1) == "1주 전" + assert self.locale._format_relative("2주", "weeks", -2) == "2주 전" + assert self.locale._format_relative("한달", "month", -1) == "한달 전" + assert self.locale._format_relative("2개월", "months", -2) == "2개월 전" + assert self.locale._format_relative("1년", "year", -1) == "작년" + assert self.locale._format_relative("2년", "years", -2) == "제작년" + assert self.locale._format_relative("3년", "years", -3) == "3년 전" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(0) == "0번째" + assert self.locale.ordinal_number(1) == "첫번째" + assert self.locale.ordinal_number(2) == "두번째" + assert self.locale.ordinal_number(3) == "세번째" + assert self.locale.ordinal_number(4) == "네번째" + assert self.locale.ordinal_number(5) == "다섯번째" + assert self.locale.ordinal_number(6) == "여섯번째" + assert self.locale.ordinal_number(7) == "일곱번째" + assert self.locale.ordinal_number(8) == "여덟번째" + assert self.locale.ordinal_number(9) == "아홉번째" + assert self.locale.ordinal_number(10) == "열번째" + assert self.locale.ordinal_number(11) == "11번째" + assert self.locale.ordinal_number(12) == "12번째" + assert self.locale.ordinal_number(100) == "100번째" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py new file mode 100644 index 00000000000..9fb4e68f3cf --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py @@ -0,0 +1,1657 @@ +# -*- coding: utf-8 -*- +from 
__future__ import unicode_literals + +import calendar +import os +import time +from datetime import datetime + +import pytest +from dateutil import tz + +import arrow +from arrow import formatter, parser +from arrow.constants import MAX_TIMESTAMP_US +from arrow.parser import DateTimeParser, ParserError, ParserMatchError + +from .utils import make_full_tz_list + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParser: + def test_parse_multiformat(self, mocker): + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=parser.ParserError, + ) + + with pytest.raises(parser.ParserError): + self.parser._parse_multiformat("str", ["fmt_a"]) + + mock_datetime = mocker.Mock() + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_b", + return_value=mock_datetime, + ) + + result = self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + assert result == mock_datetime + + def test_parse_multiformat_all_fail(self, mocker): + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=parser.ParserError, + ) + + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_b", + side_effect=parser.ParserError, + ) + + with pytest.raises(parser.ParserError): + self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + + def test_parse_multiformat_unself_expected_fail(self, mocker): + class UnselfExpectedError(Exception): + pass + + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=UnselfExpectedError, + ) + + with pytest.raises(UnselfExpectedError): + self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + + def test_parse_token_nonsense(self): + parts = {} + self.parser._parse_token("NONSENSE", "1900", parts) + assert parts == {} + + def test_parse_token_invalid_meridians(self): + parts = {} + self.parser._parse_token("A", "a..m", parts) + assert parts == {} + self.parser._parse_token("a", 
"p..m", parts) + assert parts == {} + + def test_parser_no_caching(self, mocker): + + mocked_parser = mocker.patch( + "arrow.parser.DateTimeParser._generate_pattern_re", fmt="fmt_a" + ) + self.parser = parser.DateTimeParser(cache_size=0) + for _ in range(100): + self.parser._generate_pattern_re("fmt_a") + assert mocked_parser.call_count == 100 + + def test_parser_1_line_caching(self, mocker): + mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") + self.parser = parser.DateTimeParser(cache_size=1) + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 1 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 3 + assert mocked_parser.call_args_list[2] == mocker.call(fmt="fmt_a") + + def test_parser_multiple_line_caching(self, mocker): + mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") + self.parser = parser.DateTimeParser(cache_size=2) + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 1 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") + + # fmt_a and fmt_b are in the cache, so no new calls should be made + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + assert mocked_parser.call_args_list[1] == 
mocker.call(fmt="fmt_b") + + def test_YY_and_YYYY_format_list(self): + + assert self.parser.parse("15/01/19", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( + 2019, 1, 15 + ) + + # Regression test for issue #580 + assert self.parser.parse("15/01/2019", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( + 2019, 1, 15 + ) + + assert ( + self.parser.parse( + "15/01/2019T04:05:06.789120Z", + ["D/M/YYThh:mm:ss.SZ", "D/M/YYYYThh:mm:ss.SZ"], + ) + == datetime(2019, 1, 15, 4, 5, 6, 789120, tzinfo=tz.tzutc()) + ) + + # regression test for issue #447 + def test_timestamp_format_list(self): + # should not match on the "X" token + assert ( + self.parser.parse( + "15 Jul 2000", + ["MM/DD/YYYY", "YYYY-MM-DD", "X", "DD-MMMM-YYYY", "D MMM YYYY"], + ) + == datetime(2000, 7, 15) + ) + + with pytest.raises(ParserError): + self.parser.parse("15 Jul", "X") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserParse: + def test_parse_list(self, mocker): + + mocker.patch( + "arrow.parser.DateTimeParser._parse_multiformat", + string="str", + formats=["fmt_a", "fmt_b"], + return_value="result", + ) + + result = self.parser.parse("str", ["fmt_a", "fmt_b"]) + assert result == "result" + + def test_parse_unrecognized_token(self, mocker): + + mocker.patch.dict("arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP") + del arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP["YYYY"] + + # need to make another local parser to apply patch changes + _parser = parser.DateTimeParser() + with pytest.raises(parser.ParserError): + _parser.parse("2013-01-01", "YYYY-MM-DD") + + def test_parse_parse_no_match(self): + + with pytest.raises(ParserError): + self.parser.parse("01-01", "YYYY-MM-DD") + + def test_parse_separators(self): + + with pytest.raises(ParserError): + self.parser.parse("1403549231", "YYYY-MM-DD") + + def test_parse_numbers(self): + + self.expected = datetime(2012, 1, 1, 12, 5, 10) + assert ( + self.parser.parse("2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss") + == self.expected + ) + + def 
test_parse_year_two_digit(self): + + self.expected = datetime(1979, 1, 1, 12, 5, 10) + assert ( + self.parser.parse("79-01-01 12:05:10", "YY-MM-DD HH:mm:ss") == self.expected + ) + + def test_parse_timestamp(self): + + tz_utc = tz.tzutc() + int_timestamp = int(time.time()) + self.expected = datetime.fromtimestamp(int_timestamp, tz=tz_utc) + assert self.parser.parse("{:d}".format(int_timestamp), "X") == self.expected + + float_timestamp = time.time() + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert self.parser.parse("{:f}".format(float_timestamp), "X") == self.expected + + # test handling of ns timestamp (arrow will round to 6 digits regardless) + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}123".format(float_timestamp), "X") == self.expected + ) + + # test ps timestamp (arrow will round to 6 digits regardless) + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}123456".format(float_timestamp), "X") + == self.expected + ) + + # NOTE: negative timestamps cannot be handled by datetime on Window + # Must use timedelta to handle them. 
ref: https://stackoverflow.com/questions/36179914 + if os.name != "nt": + # regression test for issue #662 + negative_int_timestamp = -int_timestamp + self.expected = datetime.fromtimestamp(negative_int_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:d}".format(negative_int_timestamp), "X") + == self.expected + ) + + negative_float_timestamp = -float_timestamp + self.expected = datetime.fromtimestamp(negative_float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}".format(negative_float_timestamp), "X") + == self.expected + ) + + # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will + # break cases like "15 Jul 2000" and a format list (see issue #447) + with pytest.raises(ParserError): + natural_lang_string = "Meet me at {} at the restaurant.".format( + float_timestamp + ) + self.parser.parse(natural_lang_string, "X") + + with pytest.raises(ParserError): + self.parser.parse("1565982019.", "X") + + with pytest.raises(ParserError): + self.parser.parse(".1565982019", "X") + + def test_parse_expanded_timestamp(self): + # test expanded timestamps that include milliseconds + # and microseconds as multiples rather than decimals + # requested in issue #357 + + tz_utc = tz.tzutc() + timestamp = 1569982581.413132 + timestamp_milli = int(round(timestamp * 1000)) + timestamp_micro = int(round(timestamp * 1000000)) + + # "x" token should parse integer timestamps below MAX_TIMESTAMP normally + self.expected = datetime.fromtimestamp(int(timestamp), tz=tz_utc) + assert self.parser.parse("{:d}".format(int(timestamp)), "x") == self.expected + + self.expected = datetime.fromtimestamp(round(timestamp, 3), tz=tz_utc) + assert self.parser.parse("{:d}".format(timestamp_milli), "x") == self.expected + + self.expected = datetime.fromtimestamp(timestamp, tz=tz_utc) + assert self.parser.parse("{:d}".format(timestamp_micro), "x") == self.expected + + # anything above max µs timestamp should fail + with 
pytest.raises(ValueError): + self.parser.parse("{:d}".format(int(MAX_TIMESTAMP_US) + 1), "x") + + # floats are not allowed with the "x" token + with pytest.raises(ParserMatchError): + self.parser.parse("{:f}".format(timestamp), "x") + + def test_parse_names(self): + + self.expected = datetime(2012, 1, 1) + + assert self.parser.parse("January 1, 2012", "MMMM D, YYYY") == self.expected + assert self.parser.parse("Jan 1, 2012", "MMM D, YYYY") == self.expected + + def test_parse_pm(self): + + self.expected = datetime(1, 1, 1, 13, 0, 0) + assert self.parser.parse("1 pm", "H a") == self.expected + assert self.parser.parse("1 pm", "h a") == self.expected + + self.expected = datetime(1, 1, 1, 1, 0, 0) + assert self.parser.parse("1 am", "H A") == self.expected + assert self.parser.parse("1 am", "h A") == self.expected + + self.expected = datetime(1, 1, 1, 0, 0, 0) + assert self.parser.parse("12 am", "H A") == self.expected + assert self.parser.parse("12 am", "h A") == self.expected + + self.expected = datetime(1, 1, 1, 12, 0, 0) + assert self.parser.parse("12 pm", "H A") == self.expected + assert self.parser.parse("12 pm", "h A") == self.expected + + def test_parse_tz_hours_only(self): + + self.expected = datetime(2025, 10, 17, 5, 30, 10, tzinfo=tz.tzoffset(None, 0)) + parsed = self.parser.parse("2025-10-17 05:30:10+00", "YYYY-MM-DD HH:mm:ssZ") + assert parsed == self.expected + + def test_parse_tz_zz(self): + + self.expected = datetime(2013, 1, 1, tzinfo=tz.tzoffset(None, -7 * 3600)) + assert self.parser.parse("2013-01-01 -07:00", "YYYY-MM-DD ZZ") == self.expected + + @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) + def test_parse_tz_name_zzz(self, full_tz_name): + + self.expected = datetime(2013, 1, 1, tzinfo=tz.gettz(full_tz_name)) + assert ( + self.parser.parse("2013-01-01 {}".format(full_tz_name), "YYYY-MM-DD ZZZ") + == self.expected + ) + + # note that offsets are not timezones + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 
12:30:45.9+1000", "YYYY-MM-DDZZZ") + + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 12:30:45.9+10:00", "YYYY-MM-DDZZZ") + + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 12:30:45.9-10", "YYYY-MM-DDZZZ") + + def test_parse_subsecond(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) + assert ( + self.parser.parse("2013-01-01 12:30:45.9", "YYYY-MM-DD HH:mm:ss.S") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) + assert ( + self.parser.parse("2013-01-01 12:30:45.98", "YYYY-MM-DD HH:mm:ss.SS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) + assert ( + self.parser.parse("2013-01-01 12:30:45.987", "YYYY-MM-DD HH:mm:ss.SSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) + assert ( + self.parser.parse("2013-01-01 12:30:45.9876", "YYYY-MM-DD HH:mm:ss.SSSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) + assert ( + self.parser.parse("2013-01-01 12:30:45.98765", "YYYY-MM-DD HH:mm:ss.SSSSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert ( + self.parser.parse( + "2013-01-01 12:30:45.987654", "YYYY-MM-DD HH:mm:ss.SSSSSS" + ) + == self.expected + ) + + def test_parse_subsecond_rounding(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + datetime_format = "YYYY-MM-DD HH:mm:ss.S" + + # round up + string = "2013-01-01 12:30:45.9876539" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # round down + string = "2013-01-01 12:30:45.98765432" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # round half-up + string = "2013-01-01 12:30:45.987653521" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == 
self.expected + + # round half-down + string = "2013-01-01 12:30:45.9876545210" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # overflow (zero out the subseconds and increment the seconds) + # regression tests for issue #636 + def test_parse_subsecond_rounding_overflow(self): + datetime_format = "YYYY-MM-DD HH:mm:ss.S" + + self.expected = datetime(2013, 1, 1, 12, 30, 46) + string = "2013-01-01 12:30:45.9999995" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + self.expected = datetime(2013, 1, 1, 12, 31, 0) + string = "2013-01-01 12:30:59.9999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + self.expected = datetime(2013, 1, 2, 0, 0, 0) + string = "2013-01-01 23:59:59.9999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # 6 digits should remain unrounded + self.expected = datetime(2013, 1, 1, 12, 30, 45, 999999) + string = "2013-01-01 12:30:45.999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # Regression tests for issue #560 + def test_parse_long_year(self): + with pytest.raises(ParserError): + self.parser.parse("09 January 123456789101112", "DD MMMM YYYY") + + with pytest.raises(ParserError): + self.parser.parse("123456789101112 09 January", "YYYY DD MMMM") + + with pytest.raises(ParserError): + self.parser.parse("68096653015/01/19", "YY/M/DD") + + def test_parse_with_extra_words_at_start_and_end_invalid(self): + input_format_pairs = [ + ("blah2016", "YYYY"), + ("blah2016blah", "YYYY"), + ("2016blah", "YYYY"), + ("2016-05blah", "YYYY-MM"), + ("2016-05-16blah", "YYYY-MM-DD"), + ("2016-05-16T04:05:06.789120blah", "YYYY-MM-DDThh:mm:ss.S"), + 
("2016-05-16T04:05:06.789120ZblahZ", "YYYY-MM-DDThh:mm:ss.SZ"), + ("2016-05-16T04:05:06.789120Zblah", "YYYY-MM-DDThh:mm:ss.SZ"), + ("2016-05-16T04:05:06.789120blahZ", "YYYY-MM-DDThh:mm:ss.SZ"), + ] + + for pair in input_format_pairs: + with pytest.raises(ParserError): + self.parser.parse(pair[0], pair[1]) + + def test_parse_with_extra_words_at_start_and_end_valid(self): + # Spaces surrounding the parsable date are ok because we + # allow the parsing of natural language input. Additionally, a single + # character of specific punctuation before or after the date is okay. + # See docs for full list of valid punctuation. + + assert self.parser.parse("blah 2016 blah", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("blah 2016", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("2016 blah", "YYYY") == datetime(2016, 1, 1) + + # test one additional space along with space divider + assert self.parser.parse( + "blah 2016-05-16 04:05:06.789120", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + "2016-05-16 04:05:06.789120 blah", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + # test one additional space along with T divider + assert self.parser.parse( + "blah 2016-05-16T04:05:06.789120", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + "2016-05-16T04:05:06.789120 blah", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert ( + self.parser.parse( + "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", + "YYYY-MM-DDThh:mm:ss.S", + ) + == datetime(2016, 5, 16, 4, 5, 6, 789120) + ) + + assert ( + self.parser.parse( + "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", + "YYYY-MM-DD hh:mm:ss.S", + ) + == datetime(2016, 5, 16, 4, 5, 6, 789120) + ) + + # regression test for issue #701 + # tests cases of a partial match surrounded by punctuation + # for the list of valid punctuation, see 
documentation + def test_parse_with_punctuation_fences(self): + assert self.parser.parse( + "Meet me at my house on Halloween (2019-31-10)", "YYYY-DD-MM" + ) == datetime(2019, 10, 31) + + assert self.parser.parse( + "Monday, 9. September 2019, 16:15-20:00", "dddd, D. MMMM YYYY" + ) == datetime(2019, 9, 9) + + assert self.parser.parse("A date is 11.11.2011.", "DD.MM.YYYY") == datetime( + 2011, 11, 11 + ) + + with pytest.raises(ParserMatchError): + self.parser.parse("11.11.2011.1 is not a valid date.", "DD.MM.YYYY") + + with pytest.raises(ParserMatchError): + self.parser.parse( + "This date has too many punctuation marks following it (11.11.2011).", + "DD.MM.YYYY", + ) + + def test_parse_with_leading_and_trailing_whitespace(self): + assert self.parser.parse(" 2016", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("2016 ", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse(" 2016 ", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse( + " 2016-05-16 04:05:06.789120 ", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + " 2016-05-16T04:05:06.789120 ", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + def test_parse_YYYY_DDDD(self): + assert self.parser.parse("1998-136", "YYYY-DDDD") == datetime(1998, 5, 16) + + assert self.parser.parse("1998-006", "YYYY-DDDD") == datetime(1998, 1, 6) + + with pytest.raises(ParserError): + self.parser.parse("1998-456", "YYYY-DDDD") + + def test_parse_YYYY_DDD(self): + assert self.parser.parse("1998-6", "YYYY-DDD") == datetime(1998, 1, 6) + + assert self.parser.parse("1998-136", "YYYY-DDD") == datetime(1998, 5, 16) + + with pytest.raises(ParserError): + self.parser.parse("1998-756", "YYYY-DDD") + + # month cannot be passed with DDD and DDDD tokens + def test_parse_YYYY_MM_DDDD(self): + with pytest.raises(ParserError): + self.parser.parse("2015-01-009", "YYYY-MM-DDDD") + + # year is required with the DDD and DDDD tokens + def 
test_parse_DDD_only(self): + with pytest.raises(ParserError): + self.parser.parse("5", "DDD") + + def test_parse_DDDD_only(self): + with pytest.raises(ParserError): + self.parser.parse("145", "DDDD") + + def test_parse_ddd_and_dddd(self): + fr_parser = parser.DateTimeParser("fr") + + # Day of week should be ignored when a day is passed + # 2019-10-17 is a Thursday, so we know day of week + # is ignored if the same date is outputted + expected = datetime(2019, 10, 17) + assert self.parser.parse("Tue 2019-10-17", "ddd YYYY-MM-DD") == expected + assert fr_parser.parse("mar 2019-10-17", "ddd YYYY-MM-DD") == expected + assert self.parser.parse("Tuesday 2019-10-17", "dddd YYYY-MM-DD") == expected + assert fr_parser.parse("mardi 2019-10-17", "dddd YYYY-MM-DD") == expected + + # Get first Tuesday after epoch + expected = datetime(1970, 1, 6) + assert self.parser.parse("Tue", "ddd") == expected + assert fr_parser.parse("mar", "ddd") == expected + assert self.parser.parse("Tuesday", "dddd") == expected + assert fr_parser.parse("mardi", "dddd") == expected + + # Get first Tuesday in 2020 + expected = datetime(2020, 1, 7) + assert self.parser.parse("Tue 2020", "ddd YYYY") == expected + assert fr_parser.parse("mar 2020", "ddd YYYY") == expected + assert self.parser.parse("Tuesday 2020", "dddd YYYY") == expected + assert fr_parser.parse("mardi 2020", "dddd YYYY") == expected + + # Get first Tuesday in February 2020 + expected = datetime(2020, 2, 4) + assert self.parser.parse("Tue 02 2020", "ddd MM YYYY") == expected + assert fr_parser.parse("mar 02 2020", "ddd MM YYYY") == expected + assert self.parser.parse("Tuesday 02 2020", "dddd MM YYYY") == expected + assert fr_parser.parse("mardi 02 2020", "dddd MM YYYY") == expected + + # Get first Tuesday in February after epoch + expected = datetime(1970, 2, 3) + assert self.parser.parse("Tue 02", "ddd MM") == expected + assert fr_parser.parse("mar 02", "ddd MM") == expected + assert self.parser.parse("Tuesday 02", "dddd MM") == 
expected + assert fr_parser.parse("mardi 02", "dddd MM") == expected + + # Times remain intact + expected = datetime(2020, 2, 4, 10, 25, 54, 123456, tz.tzoffset(None, -3600)) + assert ( + self.parser.parse( + "Tue 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + fr_parser.parse( + "mar 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + self.parser.parse( + "Tuesday 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + fr_parser.parse( + "mardi 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + + def test_parse_ddd_and_dddd_ignore_case(self): + # Regression test for issue #851 + expected = datetime(2019, 6, 24) + assert ( + self.parser.parse("MONDAY, June 24, 2019", "dddd, MMMM DD, YYYY") + == expected + ) + + def test_parse_ddd_and_dddd_then_format(self): + # Regression test for issue #446 + arw_formatter = formatter.DateTimeFormatter() + assert arw_formatter.format(self.parser.parse("Mon", "ddd"), "ddd") == "Mon" + assert ( + arw_formatter.format(self.parser.parse("Monday", "dddd"), "dddd") + == "Monday" + ) + assert arw_formatter.format(self.parser.parse("Tue", "ddd"), "ddd") == "Tue" + assert ( + arw_formatter.format(self.parser.parse("Tuesday", "dddd"), "dddd") + == "Tuesday" + ) + assert arw_formatter.format(self.parser.parse("Wed", "ddd"), "ddd") == "Wed" + assert ( + arw_formatter.format(self.parser.parse("Wednesday", "dddd"), "dddd") + == "Wednesday" + ) + assert arw_formatter.format(self.parser.parse("Thu", "ddd"), "ddd") == "Thu" + assert ( + arw_formatter.format(self.parser.parse("Thursday", "dddd"), "dddd") + == "Thursday" + ) + assert arw_formatter.format(self.parser.parse("Fri", "ddd"), "ddd") == "Fri" + assert ( + arw_formatter.format(self.parser.parse("Friday", "dddd"), "dddd") + == "Friday" + ) + assert arw_formatter.format(self.parser.parse("Sat", "ddd"), "ddd") == "Sat" + assert ( + 
arw_formatter.format(self.parser.parse("Saturday", "dddd"), "dddd") + == "Saturday" + ) + assert arw_formatter.format(self.parser.parse("Sun", "ddd"), "ddd") == "Sun" + assert ( + arw_formatter.format(self.parser.parse("Sunday", "dddd"), "dddd") + == "Sunday" + ) + + def test_parse_HH_24(self): + assert self.parser.parse( + "2019-10-30T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2019, 10, 31, 0, 0, 0, 0) + assert self.parser.parse("2019-10-30T24:00", "YYYY-MM-DDTHH:mm") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse("2019-10-30T24", "YYYY-MM-DDTHH") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse( + "2019-10-30T24:00:00.0", "YYYY-MM-DDTHH:mm:ss.S" + ) == datetime(2019, 10, 31, 0, 0, 0, 0) + assert self.parser.parse( + "2019-10-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2019, 11, 1, 0, 0, 0, 0) + assert self.parser.parse( + "2019-12-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2020, 1, 1, 0, 0, 0, 0) + assert self.parser.parse( + "2019-12-31T23:59:59.9999999", "YYYY-MM-DDTHH:mm:ss.S" + ) == datetime(2020, 1, 1, 0, 0, 0, 0) + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:01:00", "YYYY-MM-DDTHH:mm:ss") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:01", "YYYY-MM-DDTHH:mm:ss") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:00.1", "YYYY-MM-DDTHH:mm:ss.S") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:00.999999", "YYYY-MM-DDTHH:mm:ss.S") + + def test_parse_W(self): + + assert self.parser.parse("2011-W05-4", "W") == datetime(2011, 2, 3) + assert self.parser.parse("2011W054", "W") == datetime(2011, 2, 3) + assert self.parser.parse("2011-W05", "W") == datetime(2011, 1, 31) + assert self.parser.parse("2011W05", "W") == datetime(2011, 1, 31) + assert self.parser.parse("2011-W05-4T14:17:01", "WTHH:mm:ss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + assert self.parser.parse("2011W054T14:17:01", 
"WTHH:mm:ss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + assert self.parser.parse("2011-W05T14:17:01", "WTHH:mm:ss") == datetime( + 2011, 1, 31, 14, 17, 1 + ) + assert self.parser.parse("2011W05T141701", "WTHHmmss") == datetime( + 2011, 1, 31, 14, 17, 1 + ) + assert self.parser.parse("2011W054T141701", "WTHHmmss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + bad_formats = [ + "201W22", + "1995-W1-4", + "2001-W34-90", + "2001--W34", + "2011-W03--3", + "thstrdjtrsrd676776r65", + "2002-W66-1T14:17:01", + "2002-W23-03T14:17:01", + ] + + for fmt in bad_formats: + with pytest.raises(ParserError): + self.parser.parse(fmt, "W") + + def test_parse_normalize_whitespace(self): + assert self.parser.parse( + "Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True + ) == datetime(2005, 6, 1, 13, 33) + + with pytest.raises(ParserError): + self.parser.parse("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA") + + assert ( + self.parser.parse( + "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", + "YYYY-MM-DD T HH:mm:ss S", + normalize_whitespace=True, + ) + == datetime(2013, 5, 5, 12, 30, 45, 123456) + ) + + with pytest.raises(ParserError): + self.parser.parse( + "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", + "YYYY-MM-DD T HH:mm:ss S", + ) + + assert self.parser.parse( + " \n Jun 1\t 2005\n ", "MMM D YYYY", normalize_whitespace=True + ) == datetime(2005, 6, 1) + + with pytest.raises(ParserError): + self.parser.parse(" \n Jun 1\t 2005\n ", "MMM D YYYY") + + +@pytest.mark.usefixtures("dt_parser_regex") +class TestDateTimeParserRegex: + def test_format_year(self): + + assert self.format_regex.findall("YYYY-YY") == ["YYYY", "YY"] + + def test_format_month(self): + + assert self.format_regex.findall("MMMM-MMM-MM-M") == ["MMMM", "MMM", "MM", "M"] + + def test_format_day(self): + + assert self.format_regex.findall("DDDD-DDD-DD-D") == ["DDDD", "DDD", "DD", "D"] + + def test_format_hour(self): + + assert self.format_regex.findall("HH-H-hh-h") == ["HH", "H", "hh", "h"] + + def 
test_format_minute(self): + + assert self.format_regex.findall("mm-m") == ["mm", "m"] + + def test_format_second(self): + + assert self.format_regex.findall("ss-s") == ["ss", "s"] + + def test_format_subsecond(self): + + assert self.format_regex.findall("SSSSSS-SSSSS-SSSS-SSS-SS-S") == [ + "SSSSSS", + "SSSSS", + "SSSS", + "SSS", + "SS", + "S", + ] + + def test_format_tz(self): + + assert self.format_regex.findall("ZZZ-ZZ-Z") == ["ZZZ", "ZZ", "Z"] + + def test_format_am_pm(self): + + assert self.format_regex.findall("A-a") == ["A", "a"] + + def test_format_timestamp(self): + + assert self.format_regex.findall("X") == ["X"] + + def test_format_timestamp_milli(self): + + assert self.format_regex.findall("x") == ["x"] + + def test_escape(self): + + escape_regex = parser.DateTimeParser._ESCAPE_RE + + assert escape_regex.findall("2018-03-09 8 [h] 40 [hello]") == ["[h]", "[hello]"] + + def test_month_names(self): + p = parser.DateTimeParser("en_us") + + text = "_".join(calendar.month_name[1:]) + + result = p._input_re_map["MMMM"].findall(text) + + assert result == calendar.month_name[1:] + + def test_month_abbreviations(self): + p = parser.DateTimeParser("en_us") + + text = "_".join(calendar.month_abbr[1:]) + + result = p._input_re_map["MMM"].findall(text) + + assert result == calendar.month_abbr[1:] + + def test_digits(self): + + assert parser.DateTimeParser._ONE_OR_TWO_DIGIT_RE.findall("4-56") == ["4", "56"] + assert parser.DateTimeParser._ONE_OR_TWO_OR_THREE_DIGIT_RE.findall( + "4-56-789" + ) == ["4", "56", "789"] + assert parser.DateTimeParser._ONE_OR_MORE_DIGIT_RE.findall( + "4-56-789-1234-12345" + ) == ["4", "56", "789", "1234", "12345"] + assert parser.DateTimeParser._TWO_DIGIT_RE.findall("12-3-45") == ["12", "45"] + assert parser.DateTimeParser._THREE_DIGIT_RE.findall("123-4-56") == ["123"] + assert parser.DateTimeParser._FOUR_DIGIT_RE.findall("1234-56") == ["1234"] + + def test_tz(self): + tz_z_re = parser.DateTimeParser._TZ_Z_RE + assert tz_z_re.findall("-0700") 
== [("-", "07", "00")] + assert tz_z_re.findall("+07") == [("+", "07", "")] + assert tz_z_re.search("15/01/2019T04:05:06.789120Z") is not None + assert tz_z_re.search("15/01/2019T04:05:06.789120") is None + + tz_zz_re = parser.DateTimeParser._TZ_ZZ_RE + assert tz_zz_re.findall("-07:00") == [("-", "07", "00")] + assert tz_zz_re.findall("+07") == [("+", "07", "")] + assert tz_zz_re.search("15/01/2019T04:05:06.789120Z") is not None + assert tz_zz_re.search("15/01/2019T04:05:06.789120") is None + + tz_name_re = parser.DateTimeParser._TZ_NAME_RE + assert tz_name_re.findall("Europe/Warsaw") == ["Europe/Warsaw"] + assert tz_name_re.findall("GMT") == ["GMT"] + + def test_timestamp(self): + timestamp_re = parser.DateTimeParser._TIMESTAMP_RE + assert timestamp_re.findall("1565707550.452729") == ["1565707550.452729"] + assert timestamp_re.findall("-1565707550.452729") == ["-1565707550.452729"] + assert timestamp_re.findall("-1565707550") == ["-1565707550"] + assert timestamp_re.findall("1565707550") == ["1565707550"] + assert timestamp_re.findall("1565707550.") == [] + assert timestamp_re.findall(".1565707550") == [] + + def test_timestamp_milli(self): + timestamp_expanded_re = parser.DateTimeParser._TIMESTAMP_EXPANDED_RE + assert timestamp_expanded_re.findall("-1565707550") == ["-1565707550"] + assert timestamp_expanded_re.findall("1565707550") == ["1565707550"] + assert timestamp_expanded_re.findall("1565707550.452729") == [] + assert timestamp_expanded_re.findall("1565707550.") == [] + assert timestamp_expanded_re.findall(".1565707550") == [] + + def test_time(self): + time_re = parser.DateTimeParser._TIME_RE + time_seperators = [":", ""] + + for sep in time_seperators: + assert time_re.findall("12") == [("12", "", "", "", "")] + assert time_re.findall("12{sep}35".format(sep=sep)) == [ + ("12", "35", "", "", "") + ] + assert time_re.findall("12{sep}35{sep}46".format(sep=sep)) == [ + ("12", "35", "46", "", "") + ] + assert 
time_re.findall("12{sep}35{sep}46.952313".format(sep=sep)) == [ + ("12", "35", "46", ".", "952313") + ] + assert time_re.findall("12{sep}35{sep}46,952313".format(sep=sep)) == [ + ("12", "35", "46", ",", "952313") + ] + + assert time_re.findall("12:") == [] + assert time_re.findall("12:35:46.") == [] + assert time_re.findall("12:35:46,") == [] + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserISO: + def test_YYYY(self): + + assert self.parser.parse_iso("2013") == datetime(2013, 1, 1) + + def test_YYYY_DDDD(self): + assert self.parser.parse_iso("1998-136") == datetime(1998, 5, 16) + + assert self.parser.parse_iso("1998-006") == datetime(1998, 1, 6) + + with pytest.raises(ParserError): + self.parser.parse_iso("1998-456") + + # 2016 is a leap year, so Feb 29 exists (leap day) + assert self.parser.parse_iso("2016-059") == datetime(2016, 2, 28) + assert self.parser.parse_iso("2016-060") == datetime(2016, 2, 29) + assert self.parser.parse_iso("2016-061") == datetime(2016, 3, 1) + + # 2017 is not a leap year, so Feb 29 does not exist + assert self.parser.parse_iso("2017-059") == datetime(2017, 2, 28) + assert self.parser.parse_iso("2017-060") == datetime(2017, 3, 1) + assert self.parser.parse_iso("2017-061") == datetime(2017, 3, 2) + + # Since 2016 is a leap year, the 366th day falls in the same year + assert self.parser.parse_iso("2016-366") == datetime(2016, 12, 31) + + # Since 2017 is not a leap year, the 366th day falls in the next year + assert self.parser.parse_iso("2017-366") == datetime(2018, 1, 1) + + def test_YYYY_DDDD_HH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-036 04:05:06+01:00") == datetime( + 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-036 04:05:06Z") == datetime( + 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc() + ) + + def test_YYYY_MM_DDDD(self): + with pytest.raises(ParserError): + self.parser.parse_iso("2014-05-125") + + def test_YYYY_MM(self): + + for separator in 
DateTimeParser.SEPARATORS: + assert self.parser.parse_iso(separator.join(("2013", "02"))) == datetime( + 2013, 2, 1 + ) + + def test_YYYY_MM_DD(self): + + for separator in DateTimeParser.SEPARATORS: + assert self.parser.parse_iso( + separator.join(("2013", "02", "03")) + ) == datetime(2013, 2, 3) + + def test_YYYY_MM_DDTHH_mmZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05+01:00") == datetime( + 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm(self): + + assert self.parser.parse_iso("2013-02-03T04:05") == datetime(2013, 2, 3, 4, 5) + + def test_YYYY_MM_DDTHH(self): + + assert self.parser.parse_iso("2013-02-03T04") == datetime(2013, 2, 3, 4) + + def test_YYYY_MM_DDTHHZ(self): + + assert self.parser.parse_iso("2013-02-03T04+01:00") == datetime( + 2013, 2, 3, 4, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm_ss(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06") == datetime( + 2013, 2, 3, 4, 5, 6 + ) + + def test_YYYY_MM_DD_HH_mmZ(self): + + assert self.parser.parse_iso("2013-02-03 04:05+01:00") == datetime( + 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DD_HH_mm(self): + + assert self.parser.parse_iso("2013-02-03 04:05") == datetime(2013, 2, 3, 4, 5) + + def test_YYYY_MM_DD_HH(self): + + assert self.parser.parse_iso("2013-02-03 04") == datetime(2013, 2, 3, 4) + + def test_invalid_time(self): + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03 044") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03 04:05:06.") + + def test_YYYY_MM_DD_HH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-02-03 04:05:06+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + 
) + + def test_YYYY_MM_DD_HH_mm_ss(self): + + assert self.parser.parse_iso("2013-02-03 04:05:06") == datetime( + 2013, 2, 3, 4, 5, 6 + ) + + def test_YYYY_MM_DDTHH_mm_ss_S(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06.7") == datetime( + 2013, 2, 3, 4, 5, 6, 700000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78") == datetime( + 2013, 2, 3, 4, 5, 6, 780000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.789") == datetime( + 2013, 2, 3, 4, 5, 6, 789000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.7891") == datetime( + 2013, 2, 3, 4, 5, 6, 789100 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78912") == datetime( + 2013, 2, 3, 4, 5, 6, 789120 + ) + + # ISO 8601:2004(E), ISO, 2004-12-01, 4.2.2.4 ... the decimal fraction + # shall be divided from the integer part by the decimal sign specified + # in ISO 31-0, i.e. the comma [,] or full stop [.]. Of these, the comma + # is the preferred sign. + assert self.parser.parse_iso("2013-02-03T04:05:06,789123678") == datetime( + 2013, 2, 3, 4, 5, 6, 789124 + ) + + # there is no limit on the number of decimal places + assert self.parser.parse_iso("2013-02-03T04:05:06.789123678") == datetime( + 2013, 2, 3, 4, 5, 6, 789124 + ) + + def test_YYYY_MM_DDTHH_mm_ss_SZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06.7+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 700000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 780000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.789+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.7891+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789100, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78912+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzoffset(None, 
3600) + ) + + assert self.parser.parse_iso("2013-02-03 04:05:06.78912Z") == datetime( + 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzutc() + ) + + def test_W(self): + + assert self.parser.parse_iso("2011-W05-4") == datetime(2011, 2, 3) + + assert self.parser.parse_iso("2011-W05-4T14:17:01") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + assert self.parser.parse_iso("2011W054") == datetime(2011, 2, 3) + + assert self.parser.parse_iso("2011W054T141701") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + def test_invalid_Z(self): + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912zz") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912Zz") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912ZZ") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912+Z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912-Z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912 Z") + + def test_parse_subsecond(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) + assert self.parser.parse_iso("2013-01-01 12:30:45.9") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) + assert self.parser.parse_iso("2013-01-01 12:30:45.98") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) + assert self.parser.parse_iso("2013-01-01 12:30:45.987") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) + assert self.parser.parse_iso("2013-01-01 12:30:45.9876") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) + assert self.parser.parse_iso("2013-01-01 12:30:45.98765") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert self.parser.parse_iso("2013-01-01 12:30:45.987654") 
== self.expected + + # use comma as subsecond separator + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert self.parser.parse_iso("2013-01-01 12:30:45,987654") == self.expected + + def test_gnu_date(self): + """Regression tests for parsing output from GNU date.""" + # date -Ins + assert self.parser.parse_iso("2016-11-16T09:46:30,895636557-0800") == datetime( + 2016, 11, 16, 9, 46, 30, 895636, tzinfo=tz.tzoffset(None, -3600 * 8) + ) + + # date --rfc-3339=ns + assert self.parser.parse_iso("2016-11-16 09:51:14.682141526-08:00") == datetime( + 2016, 11, 16, 9, 51, 14, 682142, tzinfo=tz.tzoffset(None, -3600 * 8) + ) + + def test_isoformat(self): + + dt = datetime.utcnow() + + assert self.parser.parse_iso(dt.isoformat()) == dt + + def test_parse_iso_normalize_whitespace(self): + assert self.parser.parse_iso( + "2013-036 \t 04:05:06Z", normalize_whitespace=True + ) == datetime(2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc()) + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-036 \t 04:05:06Z") + + assert self.parser.parse_iso( + "\t 2013-05-05T12:30:45.123456 \t \n", normalize_whitespace=True + ) == datetime(2013, 5, 5, 12, 30, 45, 123456) + + with pytest.raises(ParserError): + self.parser.parse_iso("\t 2013-05-05T12:30:45.123456 \t \n") + + def test_parse_iso_with_leading_and_trailing_whitespace(self): + datetime_string = " 2016-11-15T06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = " 2016-11-15T06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = "2016-11-15T06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = "2016-11-15T 06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + # leading whitespace + datetime_string = " 2016-11-15 06:37:19.123456" + with pytest.raises(ParserError): + 
self.parser.parse_iso(datetime_string) + + # trailing whitespace + datetime_string = "2016-11-15 06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = " 2016-11-15 06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + # two dividing spaces + datetime_string = "2016-11-15 06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + def test_parse_iso_with_extra_words_at_start_and_end_invalid(self): + test_inputs = [ + "blah2016", + "blah2016blah", + "blah 2016 blah", + "blah 2016", + "2016 blah", + "blah 2016-05-16 04:05:06.789120", + "2016-05-16 04:05:06.789120 blah", + "blah 2016-05-16T04:05:06.789120", + "2016-05-16T04:05:06.789120 blah", + "2016blah", + "2016-05blah", + "2016-05-16blah", + "2016-05-16T04:05:06.789120blah", + "2016-05-16T04:05:06.789120ZblahZ", + "2016-05-16T04:05:06.789120Zblah", + "2016-05-16T04:05:06.789120blahZ", + "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", + "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", + ] + + for ti in test_inputs: + with pytest.raises(ParserError): + self.parser.parse_iso(ti) + + def test_iso8601_basic_format(self): + assert self.parser.parse_iso("20180517") == datetime(2018, 5, 17) + + assert self.parser.parse_iso("20180517T10") == datetime(2018, 5, 17, 10) + + assert self.parser.parse_iso("20180517T105513.843456") == datetime( + 2018, 5, 17, 10, 55, 13, 843456 + ) + + assert self.parser.parse_iso("20180517T105513Z") == datetime( + 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzutc() + ) + + assert self.parser.parse_iso("20180517T105513.843456-0700") == datetime( + 2018, 5, 17, 10, 55, 13, 843456, tzinfo=tz.tzoffset(None, -25200) + ) + + assert self.parser.parse_iso("20180517T105513-0700") == datetime( + 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzoffset(None, -25200) + ) + + assert self.parser.parse_iso("20180517T105513-07") == datetime( + 2018, 5, 17, 10, 55, 13, 
tzinfo=tz.tzoffset(None, -25200) + ) + + # ordinal in basic format: YYYYDDDD + assert self.parser.parse_iso("1998136") == datetime(1998, 5, 16) + + # timezone requires +- seperator + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T1055130700") + + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T10551307") + + # too many digits in date + with pytest.raises(ParserError): + self.parser.parse_iso("201860517T105513Z") + + # too many digits in time + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T1055213Z") + + def test_midnight_end_day(self): + assert self.parser.parse_iso("2019-10-30T24:00:00") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-30T24:00") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-30T24:00:00.0") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-31T24:00:00") == datetime( + 2019, 11, 1, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-12-31T24:00:00") == datetime( + 2020, 1, 1, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-12-31T23:59:59.9999999") == datetime( + 2020, 1, 1, 0, 0, 0, 0 + ) + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:01:00") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:01") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:00.1") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:00.999999") + + +@pytest.mark.usefixtures("tzinfo_parser") +class TestTzinfoParser: + def test_parse_local(self): + + assert self.parser.parse("local") == tz.tzlocal() + + def test_parse_utc(self): + + assert self.parser.parse("utc") == tz.tzutc() + assert self.parser.parse("UTC") == tz.tzutc() + + def test_parse_iso(self): + + assert self.parser.parse("01:00") == tz.tzoffset(None, 3600) + assert self.parser.parse("11:35") == tz.tzoffset(None, 11 * 3600 + 2100) + assert 
self.parser.parse("+01:00") == tz.tzoffset(None, 3600) + assert self.parser.parse("-01:00") == tz.tzoffset(None, -3600) + + assert self.parser.parse("0100") == tz.tzoffset(None, 3600) + assert self.parser.parse("+0100") == tz.tzoffset(None, 3600) + assert self.parser.parse("-0100") == tz.tzoffset(None, -3600) + + assert self.parser.parse("01") == tz.tzoffset(None, 3600) + assert self.parser.parse("+01") == tz.tzoffset(None, 3600) + assert self.parser.parse("-01") == tz.tzoffset(None, -3600) + + def test_parse_str(self): + + assert self.parser.parse("US/Pacific") == tz.gettz("US/Pacific") + + def test_parse_fails(self): + + with pytest.raises(parser.ParserError): + self.parser.parse("fail") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMonthName: + def test_shortmonth_capitalized(self): + + assert self.parser.parse("2013-Jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_shortmonth_allupper(self): + + assert self.parser.parse("2013-JAN-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_shortmonth_alllower(self): + + assert self.parser.parse("2013-jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_month_capitalized(self): + + assert self.parser.parse("2013-January-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_month_allupper(self): + + assert self.parser.parse("2013-JANUARY-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_month_alllower(self): + + assert self.parser.parse("2013-january-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_localized_month_name(self): + parser_ = parser.DateTimeParser("fr_fr") + + assert parser_.parse("2013-Janvier-01", "YYYY-MMMM-DD") == datetime(2013, 1, 1) + + def test_localized_month_abbreviation(self): + parser_ = parser.DateTimeParser("it_it") + + assert parser_.parse("2013-Gen-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMeridians: + def test_meridians_lowercase(self): + 
assert self.parser.parse("2013-01-01 5am", "YYYY-MM-DD ha") == datetime( + 2013, 1, 1, 5 + ) + + assert self.parser.parse("2013-01-01 5pm", "YYYY-MM-DD ha") == datetime( + 2013, 1, 1, 17 + ) + + def test_meridians_capitalized(self): + assert self.parser.parse("2013-01-01 5AM", "YYYY-MM-DD hA") == datetime( + 2013, 1, 1, 5 + ) + + assert self.parser.parse("2013-01-01 5PM", "YYYY-MM-DD hA") == datetime( + 2013, 1, 1, 17 + ) + + def test_localized_meridians_lowercase(self): + parser_ = parser.DateTimeParser("hu_hu") + assert parser_.parse("2013-01-01 5 de", "YYYY-MM-DD h a") == datetime( + 2013, 1, 1, 5 + ) + + assert parser_.parse("2013-01-01 5 du", "YYYY-MM-DD h a") == datetime( + 2013, 1, 1, 17 + ) + + def test_localized_meridians_capitalized(self): + parser_ = parser.DateTimeParser("hu_hu") + assert parser_.parse("2013-01-01 5 DE", "YYYY-MM-DD h A") == datetime( + 2013, 1, 1, 5 + ) + + assert parser_.parse("2013-01-01 5 DU", "YYYY-MM-DD h A") == datetime( + 2013, 1, 1, 17 + ) + + # regression test for issue #607 + def test_es_meridians(self): + parser_ = parser.DateTimeParser("es") + + assert parser_.parse( + "Junio 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a" + ) == datetime(2019, 6, 30, 20, 0) + + with pytest.raises(ParserError): + parser_.parse( + "Junio 30, 2019 - 08:00 pasdfasdfm", "MMMM DD, YYYY - hh:mm a" + ) + + def test_fr_meridians(self): + parser_ = parser.DateTimeParser("fr") + + # the French locale always uses a 24 hour clock, so it does not support meridians + with pytest.raises(ParserError): + parser_.parse("Janvier 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMonthOrdinalDay: + def test_english(self): + parser_ = parser.DateTimeParser("en_us") + + assert parser_.parse("January 1st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + assert parser_.parse("January 2nd, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 2 + ) + assert parser_.parse("January 3rd, 2013", "MMMM 
Do, YYYY") == datetime( + 2013, 1, 3 + ) + assert parser_.parse("January 4th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 4 + ) + assert parser_.parse("January 11th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 11 + ) + assert parser_.parse("January 12th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 12 + ) + assert parser_.parse("January 13th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 13 + ) + assert parser_.parse("January 21st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 21 + ) + assert parser_.parse("January 31st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 31 + ) + + with pytest.raises(ParserError): + parser_.parse("January 1th, 2013", "MMMM Do, YYYY") + + with pytest.raises(ParserError): + parser_.parse("January 11st, 2013", "MMMM Do, YYYY") + + def test_italian(self): + parser_ = parser.DateTimeParser("it_it") + + assert parser_.parse("Gennaio 1º, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + + def test_spanish(self): + parser_ = parser.DateTimeParser("es_es") + + assert parser_.parse("Enero 1º, 2013", "MMMM Do, YYYY") == datetime(2013, 1, 1) + + def test_french(self): + parser_ = parser.DateTimeParser("fr_fr") + + assert parser_.parse("Janvier 1er, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + + assert parser_.parse("Janvier 2e, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 2 + ) + + assert parser_.parse("Janvier 11e, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 11 + ) + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserSearchDate: + def test_parse_search(self): + + assert self.parser.parse( + "Today is 25 of September of 2003", "DD of MMMM of YYYY" + ) == datetime(2003, 9, 25) + + def test_parse_search_with_numbers(self): + + assert self.parser.parse( + "2000 people met the 2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss" + ) == datetime(2012, 1, 1, 12, 5, 10) + + assert self.parser.parse( + "Call 01-02-03 on 79-01-01 12:05:10", "YY-MM-DD HH:mm:ss" + ) == datetime(1979, 1, 1, 12, 5, 10) + + def 
test_parse_search_with_names(self): + + assert self.parser.parse("June was born in May 1980", "MMMM YYYY") == datetime( + 1980, 5, 1 + ) + + def test_parse_search_locale_with_names(self): + p = parser.DateTimeParser("sv_se") + + assert p.parse("Jan föddes den 31 Dec 1980", "DD MMM YYYY") == datetime( + 1980, 12, 31 + ) + + assert p.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") == datetime( + 1975, 8, 25 + ) + + def test_parse_search_fails(self): + + with pytest.raises(parser.ParserError): + self.parser.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") + + def test_escape(self): + + format = "MMMM D, YYYY [at] h:mma" + assert self.parser.parse( + "Thursday, December 10, 2015 at 5:09pm", format + ) == datetime(2015, 12, 10, 17, 9) + + format = "[MMMM] M D, YYYY [at] h:mma" + assert self.parser.parse("MMMM 12 10, 2015 at 5:09pm", format) == datetime( + 2015, 12, 10, 17, 9 + ) + + format = "[It happened on] MMMM Do [in the year] YYYY [a long time ago]" + assert self.parser.parse( + "It happened on November 25th in the year 1990 a long time ago", format + ) == datetime(1990, 11, 25) + + format = "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]" + assert self.parser.parse( + "It happened on November 25th in the year 1990 a long time ago", format + ) == datetime(1990, 11, 25) + + format = "[I'm][ entirely][ escaped,][ weee!]" + assert self.parser.parse("I'm entirely escaped, weee!", format) == datetime( + 1, 1, 1 + ) + + # Special RegEx characters + format = "MMM DD, YYYY |^${}().*+?<>-& h:mm A" + assert self.parser.parse( + "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM", format + ) == datetime(2017, 12, 31, 2, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py new file mode 100644 index 00000000000..e48b4de066c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py @@ -0,0 +1,81 
@@ +# -*- coding: utf-8 -*- +import time +from datetime import datetime + +import pytest + +from arrow import util + + +class TestUtil: + def test_next_weekday(self): + # Get first Monday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 0) == datetime(1970, 1, 5) + + # Get first Tuesday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 1) == datetime(1970, 1, 6) + + # Get first Wednesday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 2) == datetime(1970, 1, 7) + + # Get first Thursday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 3) == datetime(1970, 1, 1) + + # Get first Friday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 4) == datetime(1970, 1, 2) + + # Get first Saturday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 5) == datetime(1970, 1, 3) + + # Get first Sunday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 6) == datetime(1970, 1, 4) + + # Weekdays are 0-indexed + with pytest.raises(ValueError): + util.next_weekday(datetime(1970, 1, 1), 7) + + with pytest.raises(ValueError): + util.next_weekday(datetime(1970, 1, 1), -1) + + def test_total_seconds(self): + td = datetime(2019, 1, 1) - datetime(2018, 1, 1) + assert util.total_seconds(td) == td.total_seconds() + + def test_is_timestamp(self): + timestamp_float = time.time() + timestamp_int = int(timestamp_float) + + assert util.is_timestamp(timestamp_int) + assert util.is_timestamp(timestamp_float) + assert util.is_timestamp(str(timestamp_int)) + assert util.is_timestamp(str(timestamp_float)) + + assert not util.is_timestamp(True) + assert not util.is_timestamp(False) + + class InvalidTimestamp: + pass + + assert not util.is_timestamp(InvalidTimestamp()) + + full_datetime = "2019-06-23T13:12:42" + assert not util.is_timestamp(full_datetime) + + def test_normalize_timestamp(self): + timestamp = 1591161115.194556 + millisecond_timestamp = 1591161115194 + microsecond_timestamp = 1591161115194556 + + assert 
util.normalize_timestamp(timestamp) == timestamp + assert util.normalize_timestamp(millisecond_timestamp) == 1591161115.194 + assert util.normalize_timestamp(microsecond_timestamp) == 1591161115.194556 + + with pytest.raises(ValueError): + util.normalize_timestamp(3e17) + + def test_iso_gregorian(self): + with pytest.raises(ValueError): + util.iso_to_gregorian(2013, 0, 5) + + with pytest.raises(ValueError): + util.iso_to_gregorian(2013, 8, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py new file mode 100644 index 00000000000..2a048feb3fe --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import pytz +from dateutil.zoneinfo import get_zonefile_instance + +from arrow import util + + +def make_full_tz_list(): + dateutil_zones = set(get_zonefile_instance().zones) + pytz_zones = set(pytz.all_timezones) + return dateutil_zones.union(pytz_zones) + + +def assert_datetime_equality(dt1, dt2, within=10): + assert dt1.tzinfo == dt2.tzinfo + assert abs(util.total_seconds(dt1 - dt2)) < within diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini new file mode 100644 index 00000000000..46576b12e35 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini @@ -0,0 +1,53 @@ +[tox] +minversion = 3.18.0 +envlist = py{py3,27,35,36,37,38,39},lint,docs +skip_missing_interpreters = true + +[gh-actions] +python = + pypy3: pypy3 + 2.7: py27 + 3.5: py35 + 3.6: py36 + 3.7: py37 + 3.8: py38 + 3.9: py39 + +[testenv] +deps = -rrequirements.txt +allowlist_externals = pytest +commands = pytest + +[testenv:lint] +basepython = python3 +skip_install = true +deps = pre-commit +commands = + pre-commit install + pre-commit run --all-files --show-diff-on-failure + 
+[testenv:docs] +basepython = python3 +skip_install = true +changedir = docs +deps = + doc8 + sphinx + python-dateutil +allowlist_externals = make +commands = + doc8 index.rst ../README.rst --extension .rst --ignore D001 + make html SPHINXOPTS="-W --keep-going" + +[pytest] +addopts = -v --cov-branch --cov=arrow --cov-fail-under=100 --cov-report=term-missing --cov-report=xml +testpaths = tests + +[isort] +line_length = 88 +multi_line_output = 3 +include_trailing_comma = true + +[flake8] +per-file-ignores = arrow/__init__.py:F401 +ignore = E203,E501,W503 diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py rename to 
openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py diff --git a/openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore new file mode 100644 index 00000000000..be621609ab9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore @@ -0,0 +1,42 @@ +# General +*.py[cod] + +# Packages +*.egg +*.egg-info +dist +build +.eggs/ +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox + +# Caches +Thumbs.db + +# Development +.project +.pydevproject +.settings +.idea/ +.history/ +.vscode/ + +# Testing +.cache +test-reports/* +.pytest_cache/* \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python new file mode 100644 index 
00000000000..9dc010d8034 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. 
All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. 
Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. 
By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt new file mode 100644 index 00000000000..d9a10c0d8e8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in new file mode 100644 index 00000000000..3216ee548c6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in @@ -0,0 +1,4 @@ +include LICENSE.txt +include README.rst +recursive-include resource *.py +recursive-include doc *.rst *.conf *.py *.png *.css diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst new file mode 100644 index 00000000000..074a35f97c2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst @@ -0,0 +1,34 @@ +################# +ftrack Python API +################# + +Python API for ftrack. + +.. important:: + + This is the new Python client for the ftrack API. If you are migrating from + the old client then please read the dedicated `migration guide `_. + +************* +Documentation +************* + +Full documentation, including installation and setup guides, can be found at +http://ftrack-python-api.rtd.ftrack.com/en/stable/ + +********************* +Copyright and license +********************* + +Copyright (c) 2014 ftrack + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this work except in compliance with the License. You may obtain a copy of the +License in the LICENSE.txt file, or at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml new file mode 100644 index 00000000000..355f00f7529 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml @@ -0,0 +1,24 @@ +# Test configuration for bitbucket pipelines. +options: + max-time: 20 +definitions: + services: + ftrack: + image: + name: ftrackdocker/test-server:latest + username: $DOCKER_HUB_USERNAME + password: $DOCKER_HUB_PASSWORD + email: $DOCKER_HUB_EMAIL +pipelines: + default: + - parallel: + - step: + name: run tests against python 2.7.x + image: python:2.7 + caches: + - pip + services: + - ftrack + script: + - bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' $FTRACK_SERVER)" != "200" ]]; do sleep 1; done' + - python setup.py test \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css new file mode 100644 index 00000000000..3456b0c3c5c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css @@ -0,0 +1,16 @@ +@import "css/theme.css"; + +.domain-summary li { + float: left; + min-width: 12em; +} + +.domain-summary ul:before, ul:after { + content: ''; + clear: both; + display:block; +} + +.rst-content table.docutils td:last-child { + white-space: normal; +} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst new file mode 100644 index 00000000000..4e165b01223 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************ +ftrack_api.accessor.base +************************ + +.. automodule:: ftrack_api.accessor.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst new file mode 100644 index 00000000000..f7d9dddf376 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************ +ftrack_api.accessor.disk +************************ + +.. automodule:: ftrack_api.accessor.disk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst new file mode 100644 index 00000000000..0adc23fe2de --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************* +ftrack_api.accessor +******************* + +.. automodule:: ftrack_api.accessor + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst new file mode 100644 index 00000000000..62bd7f41659 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst @@ -0,0 +1,8 @@ +.. 
+ :copyright: Copyright (c) 2015 ftrack + +************************** +ftrack_api.accessor.server +************************** + +.. automodule:: ftrack_api.accessor.server diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst new file mode 100644 index 00000000000..9fd8994eb11 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.attribute +******************** + +.. automodule:: ftrack_api.attribute diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst new file mode 100644 index 00000000000..cbf9128a5a6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.cache +**************** + +.. automodule:: ftrack_api.cache diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst new file mode 100644 index 00000000000..607d574cb56 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************* +ftrack_api.collection +********************* + +.. 
automodule:: ftrack_api.collection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst new file mode 100644 index 00000000000..0bc4ce35f10 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************************* +ftrack_api.entity.asset_version +******************************* + +.. automodule:: ftrack_api.entity.asset_version diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst new file mode 100644 index 00000000000..f4beedc9a4e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************** +ftrack_api.entity.base +********************** + +.. automodule:: ftrack_api.entity.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst new file mode 100644 index 00000000000..c9ce0a0cf11 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*************************** +ftrack_api.entity.component +*************************** + +.. 
automodule:: ftrack_api.entity.component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst new file mode 100644 index 00000000000..483c16641bd --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +************************* +ftrack_api.entity.factory +************************* + +.. automodule:: ftrack_api.entity.factory diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst new file mode 100644 index 00000000000..fce68c0e94f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.entity +***************** + +.. automodule:: ftrack_api.entity + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst new file mode 100644 index 00000000000..9d22a7c378d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************* +ftrack_api.entity.job +********************* + +.. 
automodule:: ftrack_api.entity.job diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst new file mode 100644 index 00000000000..60e006a10c9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************** +ftrack_api.entity.location +************************** + +.. automodule:: ftrack_api.entity.location diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst new file mode 100644 index 00000000000..3588e48e5b5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************** +ftrack_api.entity.note +********************** + +.. automodule:: ftrack_api.entity.note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst new file mode 100644 index 00000000000..5777ab0b404 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************************** +ftrack_api.entity.project_schema +******************************** + +.. 
automodule:: ftrack_api.entity.project_schema diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst new file mode 100644 index 00000000000..0014498b9cc --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************** +ftrack_api.entity.user +********************** + +.. automodule:: ftrack_api.entity.user diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst new file mode 100644 index 00000000000..2b0ca8d3ed7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************* +ftrack_api.event.base +********************* + +.. automodule:: ftrack_api.event.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst new file mode 100644 index 00000000000..f5827170603 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +*************************** +ftrack_api.event.expression +*************************** + +.. 
automodule:: ftrack_api.event.expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst new file mode 100644 index 00000000000..36d7a331639 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.event.hub +******************** + +.. automodule:: ftrack_api.event.hub diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst new file mode 100644 index 00000000000..0986e8e2f4f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.event +**************** + +.. automodule:: ftrack_api.event + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst new file mode 100644 index 00000000000..974f3758177 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +*************************** +ftrack_api.event.subscriber +*************************** + +.. 
automodule:: ftrack_api.event.subscriber diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst new file mode 100644 index 00000000000..94a20e36112 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************************** +ftrack_api.event.subscription +***************************** + +.. automodule:: ftrack_api.event.subscription diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst new file mode 100644 index 00000000000..64c3a699d75 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.exception +******************** + +.. automodule:: ftrack_api.exception diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst new file mode 100644 index 00000000000..9b8154bdc36 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.formatter +******************** + +.. 
automodule:: ftrack_api.formatter diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst new file mode 100644 index 00000000000..ea3517ca68e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst @@ -0,0 +1,20 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _api_reference: + +************* +API Reference +************* + +ftrack_api +========== + +.. automodule:: ftrack_api + +.. toctree:: + :maxdepth: 1 + :glob: + + */index + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst new file mode 100644 index 00000000000..8223ee72f2b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************* +ftrack_api.inspection +********************* + +.. automodule:: ftrack_api.inspection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst new file mode 100644 index 00000000000..ecb883d3853 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +****************** +ftrack_api.logging +****************** + +.. 
automodule:: ftrack_api.logging diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst new file mode 100644 index 00000000000..b2dff9933d7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************** +ftrack_api.operation +******************** + +.. automodule:: ftrack_api.operation diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst new file mode 100644 index 00000000000..a4993d94cfb --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.plugin +***************** + +.. automodule:: ftrack_api.plugin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst new file mode 100644 index 00000000000..acbd8d237a2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.query +**************** + +.. 
automodule:: ftrack_api.query diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst new file mode 100644 index 00000000000..09cdad8627b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst @@ -0,0 +1,10 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _api_reference/resource_identifier_transformer.base: + +*********************************************** +ftrack_api.resource_identifier_transformer.base +*********************************************** + +.. automodule:: ftrack_api.resource_identifier_transformer.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst new file mode 100644 index 00000000000..755f052c9df --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst @@ -0,0 +1,16 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _api_reference/resource_identifier_transformer: + +****************************************** +ftrack_api.resource_identifier_transformer +****************************************** + +.. automodule:: ftrack_api.resource_identifier_transformer + +.. 
toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst new file mode 100644 index 00000000000..dcce173d1f6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +****************** +ftrack_api.session +****************** + +.. automodule:: ftrack_api.session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst new file mode 100644 index 00000000000..55a1cc75d2c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************* +ftrack_api.structure.base +************************* + +.. automodule:: ftrack_api.structure.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst new file mode 100644 index 00000000000..ade2c7ae887 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*********************** +ftrack_api.structure.id +*********************** + +.. 
automodule:: ftrack_api.structure.id diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst new file mode 100644 index 00000000000..cbd4545cf71 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.structure +******************** + +.. automodule:: ftrack_api.structure + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst new file mode 100644 index 00000000000..403173e257a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*************************** +ftrack_api.structure.origin +*************************** + +.. automodule:: ftrack_api.structure.origin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst new file mode 100644 index 00000000000..5c0d88026bf --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +***************************** +ftrack_api.structure.standard +***************************** + +.. 
automodule:: ftrack_api.structure.standard diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst new file mode 100644 index 00000000000..55dc0125a8c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.symbol +***************** + +.. automodule:: ftrack_api.symbol diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst new file mode 100644 index 00000000000..bfc5cef4019 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst @@ -0,0 +1,175 @@ +.. + :copyright: Copyright (c) 2015 ftrack + + +.. _caching: + +******* +Caching +******* + +The API makes use of caching in order to provide more efficient retrieval of +data by reducing the number of calls to the remote server:: + + # First call to retrieve user performs a request to the server. + user = session.get('User', 'some-user-id') + + # A later call in the same session to retrieve the same user just gets + # the existing instance from the cache without a request to the server. + user = session.get('User', 'some-user-id') + +It also seamlessly merges related data together regardless of how it was +retrieved:: + + >>> timelog = user['timelogs'][0] + >>> with session.auto_populating(False): + >>> print timelog['comment'] + NOT_SET + >>> session.query( + ... 'select comment from Timelog where id is "{0}"' + ... .format(timelog['id']) + ... 
).all() + >>> with session.auto_populating(False): + >>> print timelog['comment'] + 'Some comment' + +By default, each :class:`~ftrack_api.session.Session` is configured with a +simple :class:`~ftrack_api.cache.MemoryCache()` and the cache is lost as soon as +the session expires. + +Configuring a session cache +=========================== + +It is possible to configure the cache that a session uses. An example would be a +persistent auto-populated cache that survives between sessions:: + + import os + import ftrack_api.cache + + # Specify where the file based cache should be stored. + cache_path = os.path.join(tempfile.gettempdir(), 'ftrack_session_cache.dbm') + + + # Define a cache maker that returns a file based cache. Note that a + # function is used because the file based cache should use the session's + # encode and decode methods to serialise the entity data to a format that + # can be written to disk (JSON). + def cache_maker(session): + '''Return cache to use for *session*.''' + return ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=session.encode, + decode=session.decode + ) + + # Create the session using the cache maker. + session = ftrack_api.Session(cache=cache_maker) + +.. note:: + + There can be a performance penalty when using a more complex cache setup. + For example, serialising data and also writing and reading from disk can be + relatively slow operations. + +Regardless of the cache specified, the session will always construct a +:class:`~ftrack_api.cache.LayeredCache` with a +:class:`~ftrack_api.cache.MemoryCache` at the top level and then your cache at +the second level. This is to ensure consistency of instances returned by the +session. 
+ +You can check (or even modify) at any time what cache configuration a session is +using by accessing the `cache` attribute on a +:class:`~ftrack_api.session.Session`:: + + >>> print session.cache + + +Writing a new cache interface +============================= + +If you have a custom cache backend you should be able to integrate it into the +system by writing a cache interface that matches the one defined by +:class:`ftrack_api.cache.Cache`. This typically involves a subclass and +overriding the :meth:`~ftrack_api.cache.Cache.get`, +:meth:`~ftrack_api.cache.Cache.set` and :meth:`~ftrack_api.cache.Cache.remove` +methods. + + +Managing what gets cached +========================= + +The cache system is quite flexible when it comes to controlling what should be +cached. + +Consider you have a layered cache where the bottom layer cache should be +persisted between sessions. In this setup you probably don't want the persisted +cache to hold non-persisted values, such as modified entity values or newly +created entities not yet committed to the server. However, you might want the +top level memory cache to hold onto these values. + +Here is one way to set this up. First define a new proxy cache that is selective +about what it sets:: + + import ftrack_api.inspection + + + class SelectiveCache(ftrack_api.cache.ProxyCache): + '''Proxy cache that won't cache newly created entities.''' + + def set(self, key, value): + '''Set *value* for *key*.''' + if isinstance(value, ftrack_api.entity.base.Entity): + if ( + ftrack_api.inspection.state(value) + is ftrack_api.symbol.CREATED + ): + return + + super(SelectiveCache, self).set(key, value) + +Now use this custom cache to wrap the serialised cache in the setup above: + +.. 
code-block:: python + :emphasize-lines: 3, 9 + + def cache_maker(session): + '''Return cache to use for *session*.''' + return SelectiveCache( + ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=session.encode, + decode=session.decode + ) + ) + +Now to prevent modified attributes also being persisted, tweak the encode +settings for the file cache: + +.. code-block:: python + :emphasize-lines: 1, 9-12 + + import functools + + + def cache_maker(session): + '''Return cache to use for *session*.''' + return SelectiveCache( + ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=functools.partial( + session.encode, + entity_attribute_strategy='persisted_only' + ), + decode=session.decode + ) + ) + +And use the updated cache maker for your session:: + + session = ftrack_api.Session(cache=cache_maker) + +.. note:: + + For some type of attributes that are computed, long term caching is not + recommended and such values will not be encoded with the `persisted_only` + strategy. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py new file mode 100644 index 00000000000..11544721555 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py @@ -0,0 +1,102 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +'''ftrack Python API documentation build configuration file.''' + +import os +import re + +# -- General ------------------------------------------------------------------ + +# Extensions. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.extlinks', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.viewcode', + 'lowdown' +] + + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = u'ftrack Python API' +copyright = u'2014, ftrack' + +# Version +with open( + os.path.join( + os.path.dirname(__file__), '..', 'source', + 'ftrack_api', '_version.py' + ) +) as _version_file: + _version = re.match( + r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL + ).group(1) + +version = _version +release = _version + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_template'] + +# A list of prefixes to ignore for module listings. +modindex_common_prefix = [ + 'ftrack_api.' +] + +# -- HTML output -------------------------------------------------------------- + +if not os.environ.get('READTHEDOCS', None) == 'True': + # Only import and set the theme if building locally. + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +html_static_path = ['_static'] +html_style = 'ftrack.css' + +# If True, copy source rst files to output for reference. 
+html_copy_source = True + + +# -- Autodoc ------------------------------------------------------------------ + +autodoc_default_flags = ['members', 'undoc-members', 'inherited-members'] +autodoc_member_order = 'bysource' + + +def autodoc_skip(app, what, name, obj, skip, options): + '''Don't skip __init__ method for autodoc.''' + if name == '__init__': + return False + + return skip + + +# -- Intersphinx -------------------------------------------------------------- + +intersphinx_mapping = { + 'python': ('http://docs.python.org/', None), + 'ftrack': ( + 'http://rtd.ftrack.com/docs/ftrack/en/stable/', None + ) +} + + +# -- Todos --------------------------------------------------------------------- + +todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' + + +# -- Setup -------------------------------------------------------------------- + +def setup(app): + app.connect('autodoc-skip-member', autodoc_skip) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf new file mode 100644 index 00000000000..3c927cc1eeb --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf @@ -0,0 +1,2 @@ +[html4css1 writer] +field-name-limit:0 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst new file mode 100644 index 00000000000..99019ee44f8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst @@ -0,0 +1,56 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. 
_environment_variables: + +********************* +Environment variables +********************* + +The following is a consolidated list of environment variables that this API +can reference: + +.. envvar:: FTRACK_SERVER + + The full url of the ftrack server to connect to. For example + "https://mycompany.ftrackapp.com" + +.. envvar:: FTRACK_API_USER + + The username of the ftrack user to act on behalf of when performing actions + in the system. + + .. note:: + + When this environment variable is not set, the API will typically also + check other standard operating system variables that hold the username + of the current logged in user. To do this it uses + :func:`getpass.getuser`. + +.. envvar:: FTRACK_API_KEY + + The API key to use when performing actions in the system. The API key is + used to determine the permissions that a script has in the system. + +.. envvar:: FTRACK_APIKEY + + For backwards compatibility. See :envvar:`FTRACK_API_KEY`. + +.. envvar:: FTRACK_EVENT_PLUGIN_PATH + + Paths to search recursively for plugins to load and use in a session. + Multiple paths can be specified by separating with the value of + :attr:`os.pathsep` (e.g. ':' or ';'). + +.. envvar:: FTRACK_API_SCHEMA_CACHE_PATH + + Path to a directory that will be used for storing and retrieving a cache of + the entity schemas fetched from the server. + +.. envvar:: http_proxy / https_proxy + + If you need to use a proxy to connect to ftrack you can use the + "standard" :envvar:`http_proxy` and :envvar:`https_proxy`. Please note that they + are lowercase. 
+ + For example "export https_proxy=http://proxy.mycompany.com:8080" \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst new file mode 100644 index 00000000000..0c44a1b68c9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst @@ -0,0 +1,137 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _event_list: + +********** +Event list +********** + +The following is a consolidated list of events published directly by this API. + +For some events, a template plugin file is also listed for download +(:guilabel:`Download template plugin`) to help get you started with writing your +own plugin for a particular event. + +.. seealso:: + + * :ref:`handling_events` + * :ref:`ftrack server event list ` + +.. _event_list/ftrack.api.session.construct-entity-type: + +ftrack.api.session.construct-entity-type +======================================== + +:download:`Download template plugin +` + +:ref:`Synchronous `. Published by +the session to retrieve constructed class for specified schema:: + + Event( + topic='ftrack.api.session.construct-entity-type', + data=dict( + schema=schema, + schemas=schemas + ) + ) + +Expects returned data to be:: + + A Python class. + +.. seealso:: :ref:`working_with_entities/entity_types`. + +.. _event_list/ftrack.api.session.configure-location: + +ftrack.api.session.configure-location +===================================== + +:download:`Download template plugin +` + +:ref:`Synchronous `. Published by +the session to allow configuring of location instances:: + + Event( + topic='ftrack.api.session.configure-location', + data=dict( + session=self + ) + ) + +.. seealso:: :ref:`Configuring locations `. + +.. 
_event_list/ftrack.location.component-added: + +ftrack.location.component-added +=============================== + +Published whenever a component is added to a location:: + + Event( + topic='ftrack.location.component-added', + data=dict( + component_id='e2dc0524-b576-11d3-9612-080027331d74', + location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' + ) + ) + +.. _event_list/ftrack.location.component-removed: + +ftrack.location.component-removed +================================= + +Published whenever a component is removed from a location:: + + Event( + topic='ftrack.location.component-removed', + data=dict( + component_id='e2dc0524-b576-11d3-9612-080027331d74', + location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' + ) + ) + +.. _event_list/ftrack.api.session.ready: + +ftrack.api.session.ready +======================== + +:ref:`Synchronous `. Published after +a :class:`~ftrack_api.session.Session` has been initialized and +is ready to be used:: + + Event( + topic='ftrack.api.session.ready', + data=dict( + session=, + ) + ) + +.. warning:: + + Since the event is synchronous and blocking, avoid doing any unnecessary + work as it will slow down session initialization. + +.. seealso:: + + Also see example usage in :download:`example_plugin_using_session.py + `. + + +.. _event_list/ftrack.api.session.reset: + +ftrack.api.session.reset +======================== + +:ref:`Synchronous `. 
Published after +a :class:`~ftrack_api.session.Session` has been reset and is ready to be used +again:: + + Event( + topic='ftrack.api.session.reset', + data=dict( + session=, + ) + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst new file mode 100644 index 00000000000..985eb9bb442 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst @@ -0,0 +1,82 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/assignments_and_allocations: + +**************************************** +Working with assignments and allocations +**************************************** + +.. currentmodule:: ftrack_api.session + +The API exposes `assignments` and `allocations` relationships on objects in +the project hierarchy. You can use these to retrieve the allocated or assigned +resources, which can be either groups or users. + +Allocations can be used to allocate users or groups to a project team, while +assignments are more explicit and is used to assign users to tasks. Both +assignment and allocations are modelled as `Appointment` objects, with a +`type` attribute indicating the type of the appoinment. 
+ +The following example retrieves all users part of the project team:: + + # Retrieve a project + project = session.query('Project').first() + + # Set to hold all users part of the project team + project_team = set() + + # Add all allocated groups and users + for allocation in project['allocations']: + + # Resource may be either a group or a user + resource = allocation['resource'] + + # If the resource is a group, add its members + if isinstance(resource, session.types['Group']): + for membership in resource['memberships']: + user = membership['user'] + project_team.add(user) + + # The resource is a user, add it. + else: + user = resource + project_team.add(user) + +The next example shows how to assign the current user to a task:: + + # Retrieve a task and the current user + task = session.query('Task').first() + current_user = session.query( + u'User where username is {0}'.format(session.api_user) + ).one() + + # Create a new Appointment of type assignment. + session.create('Appointment', { + 'context': task, + 'resource': current_user, + 'type': 'assignment' + }) + + # Finally, persist the new assignment + session.commit() + +To list all users assigned to a task, see the following example:: + + task = session.query('Task').first() + users = session.query( + 'select first_name, last_name from User ' + 'where assignments any (context_id = "{0}")'.format(task['id']) + ) + for user in users: + print user['first_name'], user['last_name'] + +To list the current user's assigned tasks, see the example below:: + + assigned_tasks = session.query( + 'select link from Task ' + 'where assignments any (resource.username = "{0}")'.format(session.api_user) + ) + for task in assigned_tasks: + print u' / '.join(item['name'] for item in task['link']) + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst new file mode 
100644 index 00000000000..6a39bb20d15 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst @@ -0,0 +1,23 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/component: + +*********************** +Working with components +*********************** + +.. currentmodule:: ftrack_api.session + +Components can be created manually or using the provide helper methods on a +:meth:`session ` or existing +:meth:`asset version +`:: + + component = version.create_component('/path/to/file_or_sequence.jpg') + session.commit() + +When a component is created using the helpers it is automatically added to a +location. + +.. seealso:: :ref:`Locations tutorial ` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst new file mode 100644 index 00000000000..033942b4428 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst @@ -0,0 +1,94 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/custom_attribute: + +*********************** +Using custom attributes +*********************** + +.. currentmodule:: ftrack_api.session + +Custom attributes can be written and read from entities using the +``custom_attributes`` property. + +The ``custom_attributes`` property provides a similar interface to a dictionary. 
+ +Keys can be printed using the keys method:: + + >>> task['custom_attributes'].keys() + [u'my_text_field'] + +or access keys and values as items:: + + >>> print task['custom_attributes'].items() + [(u'my_text_field', u'some text')] + +Read existing custom attribute values:: + + >>> print task['custom_attributes']['my_text_field'] + 'some text' + +Updating a custom attributes can also be done similar to a dictionary:: + + task['custom_attributes']['my_text_field'] = 'foo' + +To query for tasks with a custom attribute, ``my_text_field``, you can use the +key from the configuration:: + + for task in session.query( + 'Task where custom_attributes any ' + '(key is "my_text_field" and value is "bar")' + ): + print task['name'] + +Limitations +=========== + +Expression attributes +--------------------- + +Expression attributes are not yet supported and the reported value will +always be the non-evaluated expression. + +Hierarchical attributes +----------------------- + +Hierarchical attributes are not yet fully supported in the API. Hierarchical +attributes support both read and write, but when read they are not calculated +and instead the `raw` value is returned:: + + # The hierarchical attribute `my_attribute` is set on Shot but this will not + # be reflected on the children. Instead the raw value is returned. + print shot['custom_attributes']['my_attribute'] + 'foo' + print task['custom_attributes']['my_attribute'] + None + +To work around this limitation it is possible to use the legacy api for +hierarchical attributes or to manually query the parents for values and use the +first value that is set. + +Validation +========== + +Custom attributes are validated on the ftrack server before persisted. The +validation will check that the type of the data is correct for the custom +attribute. 
+ + * number - :py:class:`int` or :py:class:`float` + * text - :py:class:`str` or :py:class:`unicode` + * enumerator - :py:class:`list` + * boolean - :py:class:`bool` + * date - :py:class:`datetime.datetime` or :py:class:`datetime.date` + +If the value set is not valid a :py:exc:`ftrack_api.exception.ServerError` is +raised with debug information:: + + shot['custom_attributes']['fstart'] = 'test' + + Traceback (most recent call last): + ... + ftrack_api.exception.ServerError: Server reported error: + ValidationError(Custom attribute value for "fstart" must be of type number. + Got "test" of type ) \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst new file mode 100644 index 00000000000..2be01ffe479 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst @@ -0,0 +1,53 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/encode_media: + +************** +Encoding media +************** + +Media such as images and video can be encoded by the ftrack server to allow +playing it in the ftrack web interface. Media can be encoded using +:meth:`ftrack_api.session.Session.encode_media` which accepts a path to a file +or an existing component in the ftrack.server location. 
+ +Here is an example of how to encode a video and read the output:: + + job = session.encode_media('/PATH/TO/MEDIA') + job_data = json.loads(job['data']) + + print 'Source component id', job_data['source_component_id'] + print 'Keeping original component', job_data['keep_original'] + for output in job_data['output']: + print u'Output component - id: {0}, format: {1}'.format( + output['component_id'], output['format'] + ) + +You can also call the corresponding helper method on an :meth:`asset version +`, to have the +encoded components automatically associated with the version:: + + job = asset_version.encode_media('/PATH/TO/MEDIA') + +It is also possible to get the URL to an encoded component once the job has +finished:: + + job = session.encode_media('/PATH/TO/MEDIA') + + # Wait for job to finish. + + location = session.query('Location where name is "ftrack.server"').one() + for component in job['job_components']: + print location.get_url(component) + +Media can also be an existing component in another location. Before encoding it, +the component needs to be added to the ftrack.server location:: + + location = session.query('Location where name is "ftrack.server"').one() + location.add_component(component) + session.commit() + + job = session.encode_media(component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst new file mode 100644 index 00000000000..43e31484f40 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst @@ -0,0 +1,56 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. _example/entity_links: + +****************** +Using entity links +****************** + +A link can be used to represent a dependency or another relation between +two entities in ftrack. 
+ +There are two types of entities that can be linked: + +* Versions can be linked to other asset versions, where the link entity type + is `AssetVersionLink`. +* Objects like Task, Shot or Folder, where the link entity type is + `TypedContextLink`. + +Both `AssetVersion` and `TypedContext` objects have the same relations +`incoming_links` and `outgoing_links`. To list the incoming links to a Shot we +can use the relationship `incoming_links`:: + + for link in shot['incoming_links']: + print link['from'], link['to'] + +In the above example `link['to']` is the shot and `link['from']` could be an +asset build or something else that is linked to the shot. There is an equivalent +`outgoing_links` that can be used to access outgoing links on an object. + +To create a new link between objects or asset versions create a new +`TypedContextLink` or `AssetVersionLink` entity with the from and to properties +set. In this example we will link two asset versions:: + + session.create('AssetVersionLink', { + 'from': from_asset_version, + 'to': to_asset_version + }) + session.commit() + +Using asset version link shortcut +================================= + +Links on asset version can also be created by the use of the `uses_versions` and +`used_in_versions` relations:: + + rig_version['uses_versions'].append(model_version) + session.commit() + +This has the same result as creating the `AssetVersionLink` entity as in the +previous section. 
+ +Which versions are using the model can be listed with:: + + for version in model_version['used_in_versions']: + print '{0} is using {1}'.format(version, model_version) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst new file mode 100644 index 00000000000..4fca37d754e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst @@ -0,0 +1,52 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example: + +************** +Usage examples +************** + +The following examples show how to use the API to accomplish specific tasks +using the default configuration. + +.. note:: + + If you are using a server with a customised configuration you may need to + alter the examples slightly to make them work correctly. + +Most of the examples assume you have the *ftrack_api* package imported and have +already constructed a :class:`Session`:: + + import ftrack_api + + session = ftrack_api.Session() + + +.. toctree:: + + project + component + review_session + metadata + custom_attribute + manage_custom_attribute_configuration + link_attribute + scope + job + note + list + timer + assignments_and_allocations + thumbnail + encode_media + entity_links + web_review + publishing + security_roles + task_template + sync_ldap_users + invite_user + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst new file mode 100644 index 00000000000..342f0ef6025 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst @@ -0,0 +1,31 @@ +.. + :copyright: Copyright (c) 2017 ftrack + +.. 
_example/invite_user: + +********************* +Invite user +********************* + +Here we create a new user and send them a invitation through mail + + +Create a new user:: + + user_email = 'artist@mail.vfx-company.com' + + new_user = session.create( + 'User', { + 'username':user_email, + 'email':user_email, + 'is_active':True + } + ) + + session.commit() + + +Invite our new user:: + + new_user.send_invite() + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst new file mode 100644 index 00000000000..296a0f5e173 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst @@ -0,0 +1,97 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/job: + +************* +Managing jobs +************* + +.. currentmodule:: ftrack_api.session + +Jobs can be used to display feedback to users in the ftrack web interface when +performing long running tasks in the API. + +To create a job use :meth:`Session.create`:: + + user = # Get a user from ftrack. + + job = session.create('Job', { + 'user': user, + 'status': 'running' + }) + +The created job will appear as running in the :guilabel:`jobs` menu for the +specified user. To set a description on the job, add a dictionary containing +description as the `data` key: + +.. note:: + + In the current version of the API the dictionary needs to be JSON + serialised. + +.. code-block:: python + + import json + + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'My custom job description.' + }) + }) + +When the long running task has finished simply set the job as completed and +continue with the next task. + +.. code-block:: python + + job['status'] = 'done' + session.commit() + +Attachments +=========== + +Job attachments are files that are attached to a job. 
In the ftrack web +interface these attachments can be downloaded by clicking on a job in the `Jobs` +menu. + +To get a job's attachments through the API you can use the `job_components` +relation and then use the ftrack server location to get the download URL:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + for job_component in job['job_components']: + print 'Download URL: {0}'.format( + server_location.get_url(job_component['component']) + ) + +To add an attachment to a job you have to add it to the ftrack server location +and create a `jobComponent`:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + # Create component and name it "My file". + component = session.create_component( + '/path/to/file', + data={'name': 'My file'}, + location=server_location + ) + + # Attach the component to the job. + session.create( + 'JobComponent', + {'component_id': component['id'], 'job_id': job['id']} + ) + + session.commit() + +.. note:: + + The ftrack web interface does only support downloading one attachment so + attaching more than one will have limited support in the web interface. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst new file mode 100644 index 00000000000..1dcea842cdb --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst @@ -0,0 +1,55 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/link_attribute: + +********************* +Using link attributes +********************* + +The `link` attribute can be used to retreive the ids and names of the parents of +an object. It is particularly useful in cases where the path of an object must +be presented in a UI, but can also be used to speedup certain query patterns. 
+
+You can use the `link` attribute on any entity inheriting from a
+`Context` or `AssetVersion`. Here we use it on the `Task` entity::
+
+    task = session.query(
+        'select link from Task where name is "myTask"'
+    ).first()
+    print task['link']
+
+It can also be used to create a list of parent entities, including the task
+itself::
+
+    entities = []
+    for item in task['link']:
+        entities.append(session.get(item['type'], item['id']))
+
+The `link` attribute is an ordered list of dictionaries containing data
+of the parents and the item itself. Each dictionary contains the following
+entries:
+
+    id
+        The id of the object and can be used to do a :meth:`Session.get`.
+    name
+        The name of the object.
+    type
+        The schema id of the object.
+
+A more advanced use-case is to get the parent names and ids of all timelogs for
+a user::
+
+    for timelog in session.query(
+        'select context.link, start, duration from Timelog '
+        'where user.username is "john.doe"'
+    ):
+        print timelog['context']['link'], timelog['start'], timelog['duration']
+
+The attribute is also available from the `AssetVersion` asset relation::
+
+    for asset_version in session.query(
+        'select link from AssetVersion '
+        'where user.username is "john.doe"'
+    ):
+        print asset_version['link']
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst
new file mode 100644
index 00000000000..155b25f9af6
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst
@@ -0,0 +1,46 @@
+..
+    :copyright: Copyright (c) 2015 ftrack
+
+.. _example/list:
+
+***********
+Using lists
+***********
+
+.. currentmodule:: ftrack_api.session
+
+Lists can be used to create a collection of asset versions or objects such as
+tasks. 
It could be a list of items that should be sent to client, be included in +todays review session or items that belong together in way that is different +from the project hierarchy. + +There are two types of lists, one for asset versions and one for other objects +such as tasks. + +To create a list use :meth:`Session.create`:: + + user = # Get a user from ftrack. + project = # Get a project from ftrack. + list_category = # Get a list category from ftrack. + + asset_version_list = session.create('AssetVersionList', { + 'owner': user, + 'project': project, + 'category': list_category + }) + + task_list = session.create('TypedContextList', { + 'owner': user, + 'project': project, + 'category': list_category + }) + +Then add items to the list like this:: + + asset_version_list['items'].append(asset_version) + task_list['items'].append(task) + +And remove items from the list like this:: + + asset_version_list['items'].remove(asset_version) + task_list['items'].remove(task) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst new file mode 100644 index 00000000000..e3d7c4062c1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst @@ -0,0 +1,320 @@ +.. + :copyright: Copyright (c) 2017 ftrack + +.. _example/manage_custom_attribute_configuration: + +**************************************** +Managing custom attribute configurations +**************************************** + +From the API it is not only possible to +:ref:`read and update custom attributes for entities `, +but also managing custom attribute configurations. + +Existing custom attribute configurations can be queried as :: + + # Print all existing custom attribute configurations. 
+ print session.query('CustomAttributeConfiguration').all() + +Use :meth:`Session.create` to create a new custom attribute configuration:: + + # Get the custom attribute type. + custom_attribute_type = session.query( + 'CustomAttributeType where name is "text"' + ).one() + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + + # Persist it to the ftrack instance. + session.commit() + +.. tip:: + + The example above does not add security roles. This can be done either + from System Settings in the ftrack web application, or by following the + :ref:`example/manage_custom_attribute_configuration/security_roles` example. + +Global or project specific +========================== + +A custom attribute can be global or project specific depending on the +`project_id` attribute:: + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + # Set the `project_id` and the custom attribute will only be available + # on `my_project`. + 'project_id': my_project['id'], + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + session.commit() + +A project specific custom attribute can be changed to a global:: + + custom_attribute_configuration['project_id'] = None + session.commit() + +Changing a global custom attribute configuration to a project specific is not +allowed. + +Entity types +============ + +Custom attribute configuration entity types are using a legacy notation. A +configuration can have one of the following as `entity_type`: + +:task: + Represents TypedContext (Folder, Shot, Sequence, Task, etc.) 
custom
+    attribute configurations. When setting this as entity_type the
+    object_type_id must be set as well.
+
+    Creating a text custom attribute for Folder::
+
+        custom_attribute_type = session.query(
+            'CustomAttributeType where name is "text"'
+        ).one()
+        object_type = session.query('ObjectType where name is "Folder"').one()
+        session.create('CustomAttributeConfiguration', {
+            'entity_type': 'task',
+            'object_type_id': object_type['id'],
+            'type': custom_attribute_type,
+            'label': 'Foo',
+            'key': 'foo',
+            'default': 'bar',
+        })
+        session.commit()
+
+    Can be associated with a `project_id`.
+
+:show:
+    Represents Projects custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:assetversion:
+    Represents AssetVersion custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:user:
+    Represents User custom attribute configurations.
+
+    Must be `global` and cannot be associated with a `project_id`.
+
+:list:
+    Represents List custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:asset:
+    Represents Asset custom attribute configurations.
+
+    .. note::
+
+        Asset custom attributes have limited support in the ftrack web
+        interface.
+
+    Can be associated with a `project_id`.
+
+It is not possible to change type after a custom attribute configuration has
+been created.
+
+Custom attribute configuration types
+====================================
+
+Custom attributes can be of different data types depending on what type is set
+in the configuration. Some types require an extra json encoded config to be
+set:
+
+:text:
+    A string type custom attribute.
+
+    The `default` value must be either :py:class:`str` or :py:class:`unicode`.
+
+    Can be either presented as raw text or markdown formatted in applications
+    which support it. This is configured through a markdown key::
+
+        # Get the custom attribute type. 
+ custom_attribute_type = session.query( + 'CustomAttributeType where name is "text"' + ).one() + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + + # Persist it to the ftrack instance. + session.commit() + +:boolean: + + A boolean type custom attribute. + + The `default` value must be a :py:class:`bool`. + + No config is required. + +:date: + A date type custom attribute. + + The `default` value must be an :term:`arrow` date - e.g. + arrow.Arrow(2017, 2, 8). + + No config is required. + +:enumerator: + An enumerator type custom attribute. + + The `default` value must be a list with either :py:class:`str` or + :py:class:`unicode`. + + The enumerator can either be single or multi select. The config must a json + dump of a dictionary containing `multiSelect` and `data`. Where + `multiSelect` is True or False and data is a list of options. Each option + should be a dictionary containing `value` and `menu`, where `menu` is meant + to be used as label in a user interface. + + Create a custom attribute enumerator:: + + custom_attribute_type = session.query( + 'CustomAttributeType where name is "enumerator"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Enumerator attribute', + 'key': 'enumerator_attribute', + 'default': ['bar'], + 'config': json.dumps({ + 'multiSelect': True, + 'data': json.dumps([ + {'menu': 'Foo', 'value': 'foo'}, + {'menu': 'Bar', 'value': 'bar'} + ]) + }) + }) + session.commit() + +:dynamic enumerator: + + An enumerator type where available options are fetched from remote. Created + in the same way as enumerator but without `data`. 
+ +:number: + + A number custom attribute can be either decimal or integer for presentation. + + This can be configured through the `isdecimal` config option:: + + custom_attribute_type = session.query( + 'CustomAttributeType where name is "number"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Number attribute', + 'key': 'number_attribute', + 'default': 42, + 'config': json.dumps({ + 'isdecimal': True + }) + }) + session.commit() + +Changing default +================ + +It is possible to update the `default` value of a custom attribute +configuration. This will not change the value of any existing custom +attributes:: + + # Change the default value of custom attributes. This will only affect + # newly created entities. + custom_attribute_configuration['default'] = 43 + session.commit() + +.. _example/manage_custom_attribute_configuration/security_roles: + +Security roles +============== + +By default new custom attribute configurations and the entity values are not +readable or writable by any security role. + +This can be configured through the `read_security_roles` and `write_security_roles` +attributes:: + + # Pick random security role. + security_role = session.query('SecurityRole').first() + custom_attribute_type = session.query( + 'CustomAttributeType where name is "date"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Date attribute', + 'key': 'date_attribute', + 'default': arrow.Arrow(2017, 2, 8), + 'write_security_roles': [security_role], + 'read_security_roles': [security_role] + }) + session.commit() + +.. note:: + + Setting the correct security role is important and must be changed to + whatever security role is appropriate for your configuration and intended + purpose. 
+ +Custom attribute groups +======================= + +A custom attribute configuration can be categorized using a +`CustomAttributeGroup`:: + + group = session.query('CustomAttributeGroup').first() + security_role = session.query('SecurityRole').first() + custom_attribute_type = session.query( + 'CustomAttributeType where name is "enumerator"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Enumerator attribute', + 'key': 'enumerator_attribute', + 'default': ['bar'], + 'config': json.dumps({ + 'multiSelect': True, + 'data': json.dumps([ + {'menu': 'Foo', 'value': 'foo'}, + {'menu': 'Bar', 'value': 'bar'} + ]) + }), + 'group': group, + 'write_security_roles': [security_role], + 'read_security_roles': [security_role] + }) + session.commit() + +.. seealso:: + + :ref:`example/custom_attribute` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst new file mode 100644 index 00000000000..7b168810177 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst @@ -0,0 +1,43 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/metadata: + +************** +Using metadata +************** + +.. currentmodule:: ftrack_api.session + +Key/value metadata can be written to entities using the metadata property +and also used to query entities. 
+
+The metadata property has a similar interface to a dictionary and keys can be
+printed using the keys method::
+
+    >>> print new_sequence['metadata'].keys()
+    ['frame_padding', 'focal_length']
+
+or items::
+
+    >>> print new_sequence['metadata'].items()
+    [('frame_padding', '4'), ('focal_length', '70')]
+
+Read existing metadata::
+
+    >>> print new_sequence['metadata']['frame_padding']
+    '4'
+
+Setting metadata can be done in a few ways where the latter one will replace
+any existing metadata::
+
+    new_sequence['metadata']['frame_padding'] = '5'
+    new_sequence['metadata'] = {
+        'frame_padding': '4'
+    }
+
+Entities can also be queried using metadata::
+
+    session.query(
+        'Sequence where metadata any (key is "frame_padding" and value is "4")'
+    )
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst
new file mode 100644
index 00000000000..8f8f1bb57da
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst
@@ -0,0 +1,169 @@
+..
+    :copyright: Copyright (c) 2015 ftrack
+
+.. currentmodule:: ftrack_api.session
+
+.. _example/note:
+
+***********
+Using notes
+***********
+
+Notes can be written on almost all levels in ftrack. To retrieve notes on an
+entity you can either query them or use the relation called `notes`::
+
+    task = session.query('Task').first()
+
+    # Retrieve notes using notes property.
+    notes_on_task = task['notes']
+
+    # Or query them.
+    notes_on_task = session.query('Note where parent_id is "{}"'.format(
+        task['id']
+    ))
+
+.. note::
+
+    It's currently not possible to use the `parent` property when querying
+    notes or to use the `parent` property on notes::
+
+        task = session.query('Task').first()
+
+        # This won't work in the current version of the API. 
+        session.query('Note where parent.id is "{}"'.format(
+            task['id']
+        ))
+
+        # Neither will this.
+        parent_of_note = note['parent']
+
+To create new notes you can either use the helper method called
+:meth:`~ftrack_api.entity.note.CreateNoteMixin.create_note` on any entity that
+can have notes or use :meth:`Session.create` to create them manually::
+
+    user = session.query('User').first()
+
+    # Create note using the helper method.
+    note = task.create_note('My new note', author=user)
+
+    # Manually create a note
+    note = session.create('Note', {
+        'content': 'My new note',
+        'author': user
+    })
+
+    task['notes'].append(note)
+
+Replying to an existing note can also be done with a helper method or by
+using :meth:`Session.create`::
+
+    # Create using helper method.
+    first_note_on_task = task['notes'][0]
+    first_note_on_task.create_reply('My new reply on note', author=user)
+
+    # Create manually
+    reply = session.create('Note', {
+        'content': 'My new note',
+        'author': user
+    })
+
+    first_note_on_task.replies.append(reply)
+
+Notes can have labels. Use the label argument to set labels on the
+note using the helper method::
+
+    label = session.query(
+        'NoteLabel where name is "External Note"'
+    ).first()
+
+    note = task.create_note(
+        'New note with external category', author=user, labels=[label]
+    )
+
+Or add labels to notes when creating a note manually::
+
+    label = session.query(
+        'NoteLabel where name is "External Note"'
+    ).first()
+
+    note = session.create('Note', {
+        'content': 'New note with external category',
+        'author': user
+    })
+
+    session.create('NoteLabelLink', {
+        'note_id': note['id'],
+        'label_id': label['id']
+    })
+
+    task['notes'].append(note)
+
+.. note::
+
+    Support for labels on notes was added in ftrack server version 4.3. For
+    older versions of the server, NoteCategory can be used instead. 
+ +To specify a category when creating a note simply pass a `NoteCategory` instance +to the helper method:: + + category = session.query( + 'NoteCategory where name is "External Note"' + ).first() + + note = task.create_note( + 'New note with external category', author=user, category=category + ) + +When writing notes you might want to direct the note to someone. This is done +by adding users as recipients. If a user is added as a recipient the user will +receive notifications and the note will be displayed in their inbox. + +To add recipients pass a list of user or group instances to the helper method:: + + john = session.query('User where username is "john"').one() + animation_group = session.query('Group where name is "Animation"').first() + + note = task.create_note( + 'Note with recipients', author=user, recipients=[john, animation_group] + ) + +Attachments +=========== + +Note attachments are files that are attached to a note. In the ftrack web +interface these attachments appears next to the note and can be downloaded by +the user. + +To get a note's attachments through the API you can use the `note_components` +relation and then use the ftrack server location to get the download URL:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + for note_component in note['note_components']: + print 'Download URL: {0}'.format( + server_location.get_url(note_component['component']) + ) + +To add an attachment to a note you have to add it to the ftrack server location +and create a `NoteComponent`:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + # Create component and name it "My file". + component = session.create_component( + '/path/to/file', + data={'name': 'My file'}, + location=server_location + ) + + # Attach the component to the note. 
+ session.create( + 'NoteComponent', + {'component_id': component['id'], 'note_id': note['id']} + ) + + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst new file mode 100644 index 00000000000..0b4c0879d69 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst @@ -0,0 +1,65 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/project: + +********************* +Working with projects +********************* + +.. currentmodule:: ftrack_api.session + +Creating a project +================== + +A project with sequences, shots and tasks can be created in one single +transaction. Tasks need to have a type and status set on creation based on the +project schema:: + + import uuid + + # Create a unique name for the project. + name = 'projectname_{0}'.format(uuid.uuid1().hex) + + # Naively pick the first project schema. For this example to work the + # schema must contain `Shot` and `Sequence` object types. + project_schema = session.query('ProjectSchema').first() + + # Create the project with the chosen schema. + project = session.create('Project', { + 'name': name, + 'full_name': name + '_full', + 'project_schema': project_schema + }) + + # Retrieve default types. + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + # Create sequences, shots and tasks. 
+ for sequence_number in range(1, 5): + sequence = session.create('Sequence', { + 'name': 'seq_{0}'.format(sequence_number), + 'parent': project + }) + + for shot_number in range(1, 5): + shot = session.create('Shot', { + 'name': '{0}0'.format(shot_number).zfill(3), + 'parent': sequence, + 'status': default_shot_status + }) + + for task_number in range(1, 5): + session.create('Task', { + 'name': 'task_{0}'.format(task_number), + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + # Commit all changes to the server. + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst new file mode 100644 index 00000000000..bf1da18ab9b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst @@ -0,0 +1,73 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/publishing: + +******************* +Publishing versions +******************* + +To know more about publishing and the concepts around publishing, read the +`ftrack article `_ +about publishing. + +To publish an asset you first need to get the context where the asset should be +published:: + + # Get a task from a given id. + task = session.get('Task', '423ac382-e61d-4802-8914-dce20c92b740') + +And the parent of the task which will be used to publish the asset on:: + + asset_parent = task['parent'] + +Then we create an asset and a version on the asset:: + + asset_type = session.query('AssetType where name is "Geometry"').one() + asset = session.create('Asset', { + 'name': 'My asset', + 'type': asset_type, + 'parent': asset_parent + }) + asset_version = session.create('AssetVersion', { + 'asset': asset, + 'task': task + }) + +.. 
note:: + + The task is not used as the parent of the asset, instead the task is linked + directly to the AssetVersion. + +Then when we have a version where we can create the components:: + + asset_version.create_component( + '/path/to/a/file.mov', location='auto' + ) + asset_version.create_component( + '/path/to/a/another-file.mov', location='auto' + ) + + session.commit() + +This will automatically create a new component and add it to the location which +has been configured as the first in priority. + +Components can also be named and added to a custom location like this:: + + location = session.query('Location where name is "my-location"') + asset_version.create_component( + '/path/to/a/file.mov', + data={ + 'name': 'foobar' + }, + location=location + ) + +.. seealso:: + + * :ref:`example/component` + * :ref:`example/web_review` + * :ref:`example/thumbnail` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst new file mode 100644 index 00000000000..68f7870d1c6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst @@ -0,0 +1,87 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/review_session: + +********************* +Using review sessions +********************* + +.. currentmodule:: ftrack_api.session + +Client review sessions can either be queried manually or by using a project +instance. + +.. code-block:: python + + review_sessions = session.query( + 'ReviewSession where name is "Weekly review"' + ) + + project_review_sessions = project['review_sessions'] + +To create a new review session on a specific project use :meth:`Session.create`. + +.. 
code-block:: python + + review_session = session.create('ReviewSession', { + 'name': 'Weekly review', + 'description': 'See updates from last week.', + 'project': project + }) + +To add objects to a review session create them using +:meth:`Session.create` and reference a review session and an asset version. + +.. code-block:: python + + review_session = session.create('ReviewSessionObject', { + 'name': 'Compositing', + 'description': 'Fixed shadows.', + 'version': 'Version 3', + 'review_session': review_session, + 'asset_version': asset_version + }) + +To list all objects in a review session. + +.. code-block:: python + + review_session_objects = review_session['review_session_objects'] + +Listing and adding collaborators to review session can be done using +:meth:`Session.create` and the `review_session_invitees` relation on a +review session. + +.. code-block:: python + + invitee = session.create('ReviewSessionInvitee', { + 'name': 'John Doe', + 'email': 'john.doe@example.com', + 'review_session': review_session + }) + + session.commit() + + invitees = review_session['review_session_invitees'] + +To remove a collaborator simply delete the object using +:meth:`Session.delete`. + +.. code-block:: python + + session.delete(invitee) + +To send out an invite email to a signle collaborator use +:meth:`Session.send_review_session_invite`. + +.. code-block:: python + + session.send_review_session_invite(invitee) + +Multiple invitees can have emails sent to them in one batch using +:meth:`Session.send_review_session_invites`. + +.. 
code-block:: python + + session.send_review_session_invites(a_list_of_invitees) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst new file mode 100644 index 00000000000..3be42322cef --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst @@ -0,0 +1,27 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/scope: + +************ +Using scopes +************ + +.. currentmodule:: ftrack_api.session + +Entities can be queried based on their scopes:: + + >>> tasks = session.query( + ... 'Task where scopes.name is "London"' + ... ) + +Scopes can be read and modified for entities:: + + >>> scope = session.query( + ... 'Scope where name is "London"' + ... )[0] + ... + ... if scope in task['scopes']: + ... task['scopes'].remove(scope) + ... else: + ... task['scopes'].append(scope) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst new file mode 100644 index 00000000000..4219e3d1263 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst @@ -0,0 +1,73 @@ +.. + :copyright: Copyright (c) 2017 ftrack + +.. _example/security_roles: + +********************************* +Working with user security roles +********************************* + +.. currentmodule:: ftrack_api.session + +The API exposes `SecurityRole` and `UserSecurityRole` that can be used to +specify who should have access to certain data on different projects. + +List all available security roles like this:: + + security_roles = session.query( + 'select name from SecurityRole where type is "PROJECT"' + ) + +.. 
note:: + + We only query for project roles since those are the ones we can add to a + user for certain projects. Other types include API and ASSIGNED. Type API + can only be added to global API keys, which is currently not supported via + the api and type ASSIGNED only applies to assigned tasks. + +To get all security roles from a user we can either use relations like this:: + + for user_security_role in user['user_security_roles']: + if user_security_role['is_all_projects']: + result_string = 'all projects' + else: + result_string = ', '.join( + [project['full_name'] for project in user_security_role['projects']] + ) + + print 'User has security role "{0}" which is valid on {1}.'.format( + user_security_role['security_role']['name'], + result_string + ) + +or query them directly like this:: + + user_security_roles = session.query( + 'UserSecurityRole where user.username is "{0}"'.format(session.api_user) + ).all() + +User security roles can also be added to a user for all projects like this:: + + project_manager_role = session.query( + 'SecurityRole where name is "Project Manager"' + ).one() + + session.create('UserSecurityRole', { + 'is_all_projects': True, + 'user': user, + 'security_role': project_manager_role + }) + session.commit() + +or for certain projects only like this:: + + projects = session.query( + 'Project where full_name is "project1" or full_name is "project2"' + ).all()[:] + + session.create('UserSecurityRole', { + 'user': user, + 'security_role': project_manager_role, + 'projects': projects + }) + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst new file mode 100644 index 00000000000..5ea0e47dc68 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst @@ -0,0 +1,30 @@ +.. 
+    :copyright: Copyright (c) 2014 ftrack
+
+.. _example/sync_with_ldap:
+
+********************
+Sync users with LDAP
+********************
+
+.. currentmodule:: ftrack_api.session
+
+
+If ftrack is configured to connect to LDAP you may trigger a
+synchronization through the api using the
+:meth:`ftrack_api.session.Session.call`::
+
+    result = session.call([
+        dict(
+            action='delayed_job',
+            job_type='SYNC_USERS_LDAP'
+        )
+    ])
+    job = result[0]['data']
+
+You will get a `ftrack_api.entity.job.Job` instance back which can be used
+to check the success of the job::
+
+    if job.get('status') == 'failed':
+        # The job failed, get the error.
+        logging.error(job.get('data'))
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
new file mode 100644
index 00000000000..c6161e834a4
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
@@ -0,0 +1,56 @@
+..
+    :copyright: Copyright (c) 2017 ftrack
+
+.. _example/task_template:
+
+***************************
+Working with Task Templates
+***************************
+
+Task templates can help you organize your workflows by building a collection
+of tasks to be applied for specific contexts. They can be applied to all `Context`
+objects for example Project, Sequences, Shots, etc... 
+
+Query task templates
+=======================
+
+Retrieve all task templates and their tasks for a project::
+
+    project = session.query('Project').first()
+
+    for task_template in project['project_schema']['task_templates']:
+        print('\ntask template: {0}'.format(
+            task_template['name']
+        ))
+
+        for task_type in [t['task_type'] for t in task_template['items']]:
+            print('\ttask type: {0}'.format(
+                task_type['name']
+            ))
+
+
+
+"Apply" a task template
+=======================
+Create all tasks in a random task template directly under the project::
+
+
+    project = session.query('Project').first()
+
+    task_template = random.choice(
+        project['project_schema']['task_templates']
+    )
+
+    for task_type in [t['task_type'] for t in task_template['items']]:
+        session.create(
+            'Task', {
+                'name': task_type['name'],
+                'type': task_type,
+                'parent': project
+            }
+        )
+
+    session.commit()
+
+
+
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
new file mode 100644
index 00000000000..64199869a56
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
@@ -0,0 +1,71 @@
+..
+    :copyright: Copyright (c) 2016 ftrack
+
+.. _example/thumbnail:
+
+***********************
+Working with thumbnails
+***********************
+
+Components can be used as thumbnails on various entities, including
+`Project`, `Task`, `AssetVersion` and `User`. 
To create and set a thumbnail +you can use the helper method +:meth:`~ftrack_api.entity.component.CreateThumbnailMixin.create_thumbnail` on +any entity that can have a thumbnail:: + + task = session.get('Task', my_task_id) + thumbnail_component = task.create_thumbnail('/path/to/image.jpg') + +It is also possible to set an entity thumbnail by setting its `thumbnail` +relation or `thumbnail_id` attribute to a component you would +like to use as a thumbnail. For a component to be usable as a thumbnail, +it should + + 1. Be a FileComponent. + 2. Exist in the *ftrack.server* :term:`location`. + 3. Be of an appropriate resolution and valid file type. + +The following example creates a new component in the server location, and +uses that as a thumbnail for a task:: + + task = session.get('Task', my_task_id) + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + thumbnail_component = session.create_component( + '/path/to/image.jpg', + dict(name='thumbnail'), + location=server_location + ) + task['thumbnail'] = thumbnail_component + session.commit() + +The next example reuses a version's thumbnail for the asset parent thumbnail:: + + asset_version = session.get('AssetVersion', my_asset_version_id) + asset_parent = asset_version['asset']['parent'] + asset_parent['thumbnail_id'] = asset_version['thumbnail_id'] + session.commit() + +.. _example/thumbnail/url: + +Retrieving thumbnail URL +======================== + +To get a URL to a thumbnail, `thumbnail_component`, which can be used +to download or display the image in an interface, use the following:: + + import ftrack_api.symbol + server_location = session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) + thumbnail_url = server_location.get_thumbnail_url(thumbnail_component) + thumbnail_url_tiny = server_location.get_thumbnail_url( + thumbnail_component, size=100 + ) + thumbnail_url_large = server_location.get_thumbnail_url( + thumbnail_component, size=500 + ) + +..
seealso:: + + :ref:`example/component` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst new file mode 100644 index 00000000000..eb86e2f8976 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst @@ -0,0 +1,37 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/timer: + +************ +Using timers +************ + +.. currentmodule:: ftrack_api.session + +Timers can be used to track how much time has been spent working on something. + +To start a timer for a user:: + + user = # Get a user from ftrack. + task = # Get a task from ftrack. + + user.start_timer(task) + +A timer has now been created for that user and should show up in the ftrack web +UI. + +To stop the currently running timer for a user and create a timelog from it:: + + user = # Get a user from ftrack. + + timelog = user.stop_timer() + +.. note:: + + Starting a timer when a timer is already running will raise an exception. + Use the force parameter to automatically stop the running timer first. + + .. code-block:: python + + user.start_timer(task, force=True) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst new file mode 100644 index 00000000000..f1dede570ff --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst @@ -0,0 +1,78 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/web_review: + +************************* +Publishing for web review +************************* + +Follow the :ref:`example/encode_media` example if you want to +upload and encode media using ftrack.
+ +If you already have a file encoded in the correct format and want to bypass +the built-in encoding in ftrack, you can create the component manually +and add it to the `ftrack.server` location:: + + # Retrieve or create version. + version = session.query('AssetVersion', 'SOME-ID') + + server_location = session.query('Location where name is "ftrack.server"').one() + filepath = '/path/to/local/file.mp4' + + component = version.create_component( + path=filepath, + data={ + 'name': 'ftrackreview-mp4' + }, + location=server_location + ) + + # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*. + component['metadata']['ftr_meta'] = json.dumps({ + 'frameIn': 0, + 'frameOut': 150, + 'frameRate': 25 + }) + + component.session.commit() + +To publish an image for review the steps are similar:: + + # Retrieve or create version. + version = session.query('AssetVersion', 'SOME-ID') + + server_location = session.query('Location where name is "ftrack.server"').one() + filepath = '/path/to/image.jpg' + + component = version.create_component( + path=filepath, + data={ + 'name': 'ftrackreview-image' + }, + location=server_location + ) + + # Meta data needs to contain *format*. + component['metadata']['ftr_meta'] = json.dumps({ + 'format': 'image' + }) + + component.session.commit() + +Here is a list of components names and how they should be used: + +================== ===================================== +Component name Use +================== ===================================== +ftrackreview-image Images reviewable in the browser +ftrackreview-mp4 H.264/mp4 video reviewable in browser +ftrackreview-webm WebM video reviewable in browser +================== ===================================== + +.. note:: + + Make sure to use the pre-defined component names and set the `ftr_meta` on + the components or review will not work. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst new file mode 100644 index 00000000000..aa5cc779760 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst @@ -0,0 +1,76 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******** +Glossary +******** + +.. glossary:: + + accessor + An implementation (typically a :term:`Python` plugin) for accessing + a particular type of storage using a specific protocol. + + .. seealso:: :ref:`locations/overview/accessors` + + action + Actions in ftrack provide a standardised way to integrate other tools, + either off-the-shelf or custom built, directly into your ftrack + workflow. + + .. seealso:: :ref:`ftrack:using/actions` + + api + Application programming interface. + + arrow + A Python library that offers a sensible, human-friendly approach to + creating, manipulating, formatting and converting dates, times, and + timestamps. Read more at http://crsmithdev.com/arrow/ + + asset + A container for :term:`asset versions `, typically + representing the output from an artist. For example, 'geometry' + from a modeling artist. Has an :term:`asset type` that categorises the + asset. + + asset type + Category for a particular asset. + + asset version + A specific version of data for an :term:`asset`. Can contain multiple + :term:`components `. + + component + A container to hold any type of data (such as a file or file sequence). + An :term:`asset version` can have any number of components, each with + a specific name. For example, a published version of geometry might + have two components containing the high and low resolution files, with + the component names as 'hires' and 'lowres' respectively. + + PEP-8 + Style guide for :term:`Python` code. 
Read the guide at + https://www.python.org/dev/peps/pep-0008/ + + plugin + :term:`Python` plugins are used by the API to extend it with new + functionality, such as :term:`locations ` or :term:`actions `. + + .. seealso:: :ref:`understanding_sessions/plugins` + + python + A programming language that lets you work more quickly and integrate + your systems more effectively. Often used in creative industries. Visit + the language website at http://www.python.org + + PyPi + :term:`Python` package index. The Python Package Index or PyPI is the + official third-party software repository for the Python programming + language. Visit the website at https://pypi.python.org/pypi + + resource identifier + A string that is stored in ftrack as a reference to a resource (such as + a file) in a specific location. Used by :term:`accessors ` to + determine how to access data. + + .. seealso:: :ref:`locations/overview/resource_identifiers` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst new file mode 100644 index 00000000000..1d378473fac --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst @@ -0,0 +1,315 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _handling_events: + +*************** +Handling events +*************** + +.. currentmodule:: ftrack_api.event + +Events are generated in ftrack when things happen such as a task being updated +or a new version being published. Each :class:`~ftrack_api.session.Session` +automatically connects to the event server and can be used to subscribe to +specific events and perform an action as a result. That action could be updating +another related entity based on a status change or generating folders when a new +shot is created for example. 
+ +The :class:`~hub.EventHub` for each :class:`~ftrack_api.session.Session` is +accessible via :attr:`Session.event_hub +<~ftrack_api.session.Session.event_hub>`. + +.. _handling_events/subscribing: + +Subscribing to events +===================== + +To listen to events, you register a function against a subscription using +:meth:`Session.event_hub.subscribe `. The subscription +uses the :ref:`expression ` syntax and will filter +against each :class:`~base.Event` instance to determine if the registered +function should receive that event. If the subscription matches, the registered +function will be called with the :class:`~base.Event` instance as its sole +argument. The :class:`~base.Event` instance is a mapping like structure and can +be used like a normal dictionary. + +The following example subscribes a function to receive all 'ftrack.update' +events and then print out the entities that were updated:: + + import ftrack_api + + + def my_callback(event): + '''Event callback printing all new or updated entities.''' + for entity in event['data'].get('entities', []): + + # Print data for the entity. + print(entity) + + + # Subscribe to events with the update topic. + session = ftrack_api.Session() + session.event_hub.subscribe('topic=ftrack.update', my_callback) + +At this point, if you run this, your code would exit almost immediately. This +is because the event hub listens for events in a background thread. Typically, +you only want to stay connected whilst using the session, but in some cases you +will want to block and listen for events solely - a dedicated event processor. +To do this, use the :meth:`EventHub.wait ` method:: + + # Wait for events to be received and handled. + session.event_hub.wait() + +You cancel waiting for events by using a system interrupt (:kbd:`Ctrl-C`). +Alternatively, you can specify a *duration* to process events for:: + + # Only wait and process events for 5 seconds. + session.event_hub.wait(duration=5) + +.. 
note:: + + Events are continually received and queued for processing in the background + as soon as the connection to the server is established. As a result you may + see a flurry of activity as soon as you call + :meth:`~hub.EventHub.wait` for the first time. + +.. _handling_events/subscribing/subscriber_information: + +Subscriber information +---------------------- + +When subscribing, you can also specify additional information about your +subscriber. This contextual information can be useful when routing events, +particularly when :ref:`targeting events +`. By default, the +:class:`~hub.EventHub` will set some default information, but it can be +useful to enhance this. To do so, simply pass in *subscriber* as a dictionary of +data to the :meth:`~hub.EventHub.subscribe` method:: + + session.event_hub.subscribe( + 'topic=ftrack.update', + my_callback, + subscriber={ + 'id': 'my-unique-subscriber-id', + 'applicationId': 'maya' + } + ) + +.. _handling_events/subscribing/sending_replies: + +Sending replies +--------------- + +When handling an event it is sometimes useful to be able to send information +back to the source of the event. For example, +:ref:`ftrack:developing/events/list/ftrack.location.request-resolve` would +expect a resolved path to be sent back. + +You can craft a custom reply event if you want, but an easier way is just to +return the appropriate data from your handler. Any non *None* value will be +automatically sent as a reply:: + + def on_event(event): + # Send following data in automatic reply. + return {'success': True, 'message': 'Cool!'} + + session.event_hub.subscribe('topic=test-reply', on_event) + +.. seealso:: + + :ref:`handling_events/publishing/handling_replies` + +.. note:: + + Some events are published :ref:`synchronously + `. In this case, any returned data + is passed back to the publisher directly. + +.. 
_handling_events/subscribing/stopping_events: + +Stopping events +--------------- + +The *event* instance passed to each event handler also provides a method for +stopping the event, :meth:`Event.stop `. + +Once an event has been stopped, no further handlers for that specific event +will be called **locally**. Other handlers in other processes may still be +called. + +Combining this with setting appropriate priorities when subscribing to a topic +allows handlers to prevent lower priority handlers running when desired. + + >>> import ftrack_api + >>> import ftrack_api.event.base + >>> + >>> def callback_a(event): + ... '''Stop the event!''' + ... print('Callback A') + ... event.stop() + >>> + >>> def callback_b(event): + ... '''Never run.''' + ... print('Callback B') + >>> + >>> session = ftrack_api.Session() + >>> session.event_hub.subscribe( + ... 'topic=test-stop-event', callback_a, priority=10 + ... ) + >>> session.event_hub.subscribe( + ... 'topic=test-stop-event', callback_b, priority=20 + ... ) + >>> session.event_hub.publish( + ... ftrack_api.event.base.Event(topic='test-stop-event') + ... ) + >>> session.event_hub.wait(duration=5) + Callback A called. + +.. _handling_events/publishing: + +Publishing events +================= + +So far we have looked at listening to events coming from ftrack. However, you +are also free to publish your own events (or even publish relevant ftrack +events). + +To do this, simply construct an instance of :class:`ftrack_api.event.base.Event` +and pass it to :meth:`EventHub.publish ` via the session:: + + import ftrack_api.event.base + + event = ftrack_api.event.base.Event( + topic='my-company.some-topic', + data={'key': 'value'} + ) + session.event_hub.publish(event) + +The event hub will automatically add some information to your event before it +gets published, including the *source* of the event. By default the event source +is just the event hub, but you can customise this to provide more relevant +information if you want. 
For example, if you were publishing from within Maya:: + + session.event_hub.publish(ftrack_api.event.base.Event( + topic='my-company.some-topic', + data={'key': 'value'}, + source={ + 'applicationId': 'maya' + } + )) + +Remember that all supplied information can be used by subscribers to filter +events so the more accurate the information the better. + +.. _handling_events/publishing/synchronously: + +Publish synchronously +--------------------- + +It is also possible to call :meth:`~hub.EventHub.publish` synchronously by +passing `synchronous=True`. In synchronous mode, only local handlers will be +called. The result from each called handler is collected and all the results +returned together in a list:: + + >>> import ftrack_api + >>> import ftrack_api.event.base + >>> + >>> def callback_a(event): + ... return 'A' + >>> + >>> def callback_b(event): + ... return 'B' + >>> + >>> session = ftrack_api.Session() + >>> session.event_hub.subscribe( + ... 'topic=test-synchronous', callback_a, priority=10 + ... ) + >>> session.event_hub.subscribe( + ... 'topic=test-synchronous', callback_b, priority=20 + ... ) + >>> results = session.event_hub.publish( + ... ftrack_api.event.base.Event(topic='test-synchronous'), + ... synchronous=True + ... ) + >>> print results + ['A', 'B'] + +.. _handling_events/publishing/handling_replies: + +Handling replies +---------------- + +When publishing an event it is also possible to pass a callable that will be +called with any :ref:`reply event ` +received in response to the published event. + +To do so, simply pass in a callable as the *on_reply* parameter:: + + def handle_reply(event): + print 'Got reply', event + + session.event_hub.publish( + ftrack_api.event.base.Event(topic='test-reply'), + on_reply=handle_reply + ) + +.. 
_handling_events/publishing/targeting: + +Targeting events +---------------- + +In addition to subscribers filtering events to receive, it is also possible to +give an event a specific target to help route it to the right subscriber. + +To do this, set the *target* value on the event to an :ref:`expression +`. The expression will filter against registered +:ref:`subscriber information +`. + +For example, if you have many subscribers listening for a event, but only want +one of those subscribers to get the event, you can target the event to the +subscriber using its registered subscriber id:: + + session.event_hub.publish( + ftrack_api.event.base.Event( + topic='my-company.topic', + data={'key': 'value'}, + target='id=my-custom-subscriber-id' + ) + ) + +.. _handling_events/expressions: + +Expressions +=========== + +An expression is used to filter against a data structure, returning whether the +structure fulfils the expression requirements. Expressions are currently used +for subscriptions when :ref:`subscribing to events +` and for targets when :ref:`publishing targeted +events `. + +The form of the expression is loosely groupings of 'key=value' with conjunctions +to join them. + +For example, a common expression for subscriptions is to filter against an event +topic:: + + 'topic=ftrack.location.component-added' + +However, you can also perform more complex filtering, including accessing +nested parameters:: + + 'topic=ftrack.location.component-added and data.locationId=london' + +.. note:: + + If the structure being tested does not have any value for the specified + key reference then it is treated as *not* matching. + +You can also use a single wildcard '*' at the end of any value for matching +multiple values. 
For example, the following would match all events that have a +topic starting with 'ftrack.':: + + 'topic=ftrack.*' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png new file mode 100644 index 0000000000000000000000000000000000000000..7438cb52bebd5dd1c0c5814cd7e1d5f2fdf6a572 GIT binary patch literal 7313 zcmV;C9B$)@P)KOJ#IsdX9YK2_oxysWkE--^>ub$^C=yJ%K%Nmb;2;67`L)eAY4kcqufz9M);<8V8!p z=o?uv&!ZhL4m4L)*EDnoGk5M=e08kfw{zRawV>DKmo+j(-X%mx?aklXJ34-A7N39K ze@&v_bdT*GepTS$Og#Q7dZ2A3&$bdOFRNkcPiuQ=E~c>2)91UHrmhjwBz$)TLRRNP zR|~zM+_`ToU4~HW(JdcWod`9orf&147T29$o58)^S`X+i%A<8<#YDSnTx70uIw!yKJOJ(=j z_ibhR<=Sq>C~i};bUo6;?ouuandaZ8P_5UzeB~e3mw({$&@~8xy5|X}E(ysG)Yu33 z^P8z^w=Q`m##C7lgr%;z)@FS_pP%=BWOp!mA=L-f6`cXLzq$XPX;6iXTY*)o}39>}p6n$tmGpgu~iD$?*G{5_Z4unBilu z{R3c;HHe3GZuSE_?i;+UDDaB`KBEh1ab&xL%>g>FpL*yUXeA1=I*7S=F^gkYe)=^C5*GB&v?!spa8xuMf* zmPG5fz_8}ut|zHpefHCZ^axtf=98)&#TF=C$aJV2A&Y}8G7U0VK62K9sP(*hO=SyT zR9I?eV`KUxTr8xuSV%nUDjYag=0Cak=Aq&`S?z?Aj^G=;P3bpi53r^6e&OpB!vrGa z*`NQ?)zu9)y6)eHot_&LHFS8#pCzZkw+l?Hilf2kSWq1OB06H={iEF2F>I1VAHnqH zp>GIT!5UKGlGTDBD5?z3VQYnV%Wn{}0^eMCC96?$s-{9h z24`5k)U}MvRH1y>n)wn-Rss+vMF+&!rW&CsSJ~9_l&rsYoAQ$JxEXE@QLP!H4|dvhV@zzFEit5UD1(uV%F(A@$o(wL6|ZUd->r zbmBpu!>XXN{(z{YkJ%+gRCPC8S*Q~2_|j}FcV)?ntPeOTtEDFJLi8iCSSJ>QMD?sJ zO*BFSuK70vn`y!n)k**os=<-E+eUTm?Zuh{Yh3E%Pb0Pa}Z-vat3=DLAFQibi8U4Gs`V6VN@w&x^%vX|Fp?m0USUnp?k!k3pgx!>WvAV4voV2?O zD_enq%~g2B>0??fsCH5^re$LdSuA9EHxCW`>!1f!&z|JS5d2mkLVid;rY9q9H7l_GM+q?69=#2NpVjScK5agadSJ$Z|vVmHHoh)~$3Yj)nC^yaP|b3lI~N-js~>WMo9FBO+oPZCFR5L zQs()w`8VKA7KCJ@a9n*WA*snU7^Rc*J3)Sh_~kEuDTMf&i>7Jv4d4I%_YL9OkmpoI zBq-(0d_C8M#O%7XvobX1UwWZ`Ro?VG4wKNfNXUjD&Y8fM&Z{)ShY4^1b6-UP zK~zj07y{P%vKfYPVuFOF-2Dw{fXrf6hiOgdD+oxCGz$7YB4O6n7c^@o0e+@BAgKus zTODj_=(Do+II zZz-Yxn+hPun1n4? 
zudyH>KYsklC!hS&g}CD5k3Tj9ud%{6k#uaH>Cn@~l%y?+o^j6F(3Gs@h9$<#l)Il7 z&W3JW;0Z0DWiJ)uGws`vt3$&c`ZMkrpZY2!D9Al%n0L#ih zBe3VTC_B0=QyJSeMHT_csc;0TXlU}P>B?C@cs84Zxz8yGeZuC>Va$$bI9Wh$rj^Gi zGs(vkYFv(#34Ja|o3jE{MI~SPIE<_QT(xhGZlv793VAWr>5ItBe%8;+n3T-`@6N(H z2~D83h{4;i4tZcaa9ri<*o>FgdWU@U=#f%NdSHGsgwqcUQca(49<3xVOsD1JCdk~- ziq1BfZd3Ch!@_wD6@ym<$^=a1eq@!jgrJnK>(b?hT8QJ;-8uc@sML&%^R2<6=)dd1JXZD41vkL z{w4J!0~6dJiU9gV&kcA6cPlLX_4IllSsy-p`0UxU&pa?c8N%t?irmFq!1{;B7xUZR zk!zg>``QLOq<|KRur>XLipBO8!8R@iMR-qlPA56$&e-87xAO|<3>1kwc2#6Wi-NR< z*i|S<-kTvHASXK{U~02{PDOvPLwe?pD?+mr}+JG3x$euNJ+AxO`^A>M9C@;?dC$q+th~3es|xK7+B_c~*o6f(*;u z_tFx0F0G-{F8e_gmhHKU&UQ$I7DJVK&K;h)<6e*|Bp-o88)~WVO zM3-A}8tzbSAm0X{uYwHr2Adl=FvsAl)vx=d&PN}8Wd09__Q3pP2tHff#7LCWMTlh@ z*)rD&d@gdq$2uVU`E6x=&x~r-kOz2DU55jLt&e z6Qt%{WJ*2B-4Go2LTldVW?VA#I7S43Z|C(Eiq zDvHz!vV1N`JdVOslooaXD+Ql{8KZKYWw3nTjW}saNZJ9PP+sEkf(s(+{ zGj}m;9|6=$!*0e2y5p(2AZe5yLEbNmtgY)Hr9$7Y_RY}&da2UT=>(^YK(FD?oxuw;%hI+4?g(dT#!gOh2E}zv|Yx9kQ%Fr1E9dok)b#+k0d@kGZaFo!=Q$( z6b@Gx)>=VcZqy?`HiuiG(Ww@<*(wP%tkfxAD`}u{EY^Dv`u^PgxSfVkl|Cusd7=on z+P6nX!vk;rI}32_)^W}M&)${&rqLwv`Gbc26UP65%`@^_jyGF=GWzb*Lb)wDht~cM zqh;;f3GseFg833Yc4yfgv>MBYG)REJ5};v(T4=Q5d8?XA(rlW^e5bE2)x)@_y1J^T ze_h>e)b%Gm{hb;>h7vf|za}A)hs~2n{=X@35K$02quodr;xbk21mOTe!_(JO07%QS zo;`bZ@7}#{zy0?1?b|Dy-~s-GLU;uN$HGuWiu{h_NdD{Zdv&Mo{ieUvQKU$b`sth^ zMT*p&I*JvmORgqxFkG4m# zVg<;6D?E-9CjH{(ciRpB%#NE{>Hg~0{wa4;0rJOJrQj<(K;U?o%C2eJr=>HS1sT)4 zQ$m#Aj{BJ7EK>9oAdd|fUj7t(g$D@yK22ll|5!5glrQ9dJOUQ1Ju0R&n7^qbgRZlk zv)aA_FhM1;&+gZ7K!j$nV9i`#U|>slMB=FR5tm+$mWfm|LcG9 zdgIX@gL>W^@LED{GJj)!3cFDOG7P`iei?j)2MSE8&i2pqPn)hdd*x!b+!~b_iA=3w zFTp38&kQI#I?~dk;={!3DA}u9H4(S5JH@iy_TR`%tv<@zhrU}$Ehe7Y)z$OS-r+^Fh z2g5Kf4Ab+Yiq2js#j` zodY{tuwe?=d$hc=Wz`z{D=|t7|4z44mUyX7A+;_{c9}(;JeBl3c%3sJUPs}jLdUIq zhaT_&6Kv zyvdaAuNs$1u<;wN&j66Vb3IgU zBuvTjfwAob)s=NS_LW>GC7)lgeKHt7sHEp3*+UA^*Yr_8w?2iw_$&{F<}Dfq3Xq{V z_o5JZg$D|dup9K47=OZ+!%Oa#)Mzogg$9URV8t&9w2p6EDa4uyLAbX%$q}Z)sK#C3uIN{3Yw(4RP 
zCr2wuU84vo>vmKn*C{kQX%j0}6qVaa==qC#UVRbfphtDJhYZWv=i9+ocz{4~gxshO z$4esFuKb$?0+Pxp9L*q@oA|tMKsl2GGPsFm?gMzpRxxC)8)K2Df4F}Q*P36{WZGmh zg0&t-$i-Ysa2Wz%2p4#SoTJe>%zhTp3?3mbAX)+q zL+}RxQm!*BAfuN58j$+yem>0Wpd&U#lavBvNd6)FBKQgq5SUn<**IcnE}4y9yRl!a zw5a65oxQH#_gYrP=?4H3zF7Qe+aJm%T2PXzc^y2Jp|$AG2IO86>PEBGo>NNU;7AnV zZL^ZkV)kW!I}wosQZ6G}%hth3{R8We^}G9@8nhJ*;ew9?U5hLwQ%m@OS=rs!E9Awb zX*n7S+~n| zP6Q-_bDHV+Q~J0dd@BzXo!lk#w8FT-KK2qc8_%m#)}O(D%RCRhF#rSts=GT+r!}I{ zyz3tDa}Fwcv&4!LT7+kN^8=K@@FJEY*K(Ig!ThDg8I&8PVGRdizT=Z+!f~?4m#j7e=S7vv{hKxSziMc1vdAISSGAEqekW zqnbh3XqHn3iG!*PMJ>O+av`D-&6_TX8yKP=E~2TR-H20SN-{Q!;ct)@Q@9l5Wps@Bb15##~-e!tl!Dul#Wt zpEVCVBDY6ka`xL@HW-1H9zcWNXRPlBY9pB-Yx^EpNIr&h0jtof(biFn9zK{#ExKLN zBxKi@xaNA2-s@wbFZ|gX=^S{ZTy!LyPU7Hmgn`!^t4IezmzqC&Oe#PI5&`nKFl}6k zggAVJ+QPG$;Kxg`S)IqIbXAIHhWpQPqBEr9$8H(JpyancduVa;p*{w{vU^UsPaLQ}hJ9Wv4l;5)} z`)yzSQb)03#R`y$6`xR6cj_ooq%OIdKnDj0|B}%Wn4(CL0wiEhdWDC=q#^~#BcO7* zJn0G#c=bnHqezhgWWW_3|7-8c6=b!QXnzs^z!&HTh>1Jxl#CFscqPJq@8OMzyKxtJ z!+utxgoJ=Z34sD73KX=Zf>uwf2VGBlw>c5RluA{lIv{nx`SUob23#KEI6mZ~I&RqT zZz1Hj-+t@=goifxGw4_vd84ez)4Z3zvGWZQq=-B5*Nbc;vmi9BLHq z%PL!!>EunecVy3p_7qaMZ%1(_B%DQXJ8Aet=DDJz=GLHX)9@kypEoB#KwqbGQu};S zHH31NZF!jeRy{weMdCX!ipZ5bDpDoEWeV4D>kLq=OQP^JlsYRV$0iC7RGuu+JV}@} z{c$a}OU(7J>SgMEp_5`63ShkeqKkH6J<5%c55uchul%3z&<1N`#n_qNjq;9At~5WP zsZrWu%5}fiS>6!F`aC*o>lbM5`w}I2nIQC?Fvmg)oLV_?P_$36)f)zddRZc~_ITizTh{QTS-%L6 zPZMj5tBju-q8j9yk{~i1?k*^5T0LI|K(7N^Kt_T?{l8hieAEezZY@QSCU`n*78_qHtAz!?B;s1n(Hdq@A{Rchg==cN1(~d5% z(w`t7pHbnAm8^|-0F?Bu3_V=mMO2(6vgPU-G%}C(g+=G4N!km>X2gbM32<>u_(}3! zoc3}tyj#59+yr?ZUS^OJr(E{hCO)IDlVwQOy1A~`*(wdMt(2X`oo?pdN^CP64^F0; zc}HSyPQn4^+ZpoVc>esk{}UeC;1)v8+bwuT4h357FU`%Ju>i$L6+&CPgoLMf%RLR@ z!ZO5tLxzQ1{-OxBXEvlu61s%UsI)Ds$4!23Zi1>dEY2?(wL(aexepq}aj|#bc!0G! 
zgOgn~s@D<{EW^`us)9JhFC`?R>f}6FUi(93HUp7~saD&MIyd0ivuFNKcxVG3A#wln(N>goIt9c2?j) zh>5iAD37c;z(HjcshuE-ZU7 zdw?}*^}w+;+2s_>MF)pyDBTU%2>JBsQ|}i8z+fGHb+GdHG)H$-=|z1AcYu9z2#iS~ zbX#2ZE6S<9D5KqiU{AR17(}eEp|K|O6xLuBYRu!nZW7@oAlp2)Gv$~qVVns`k?AC1 z8y%X=#JNLg8Hcjbxbk^(69mvIu^GV4i*nHdL47oM5uehWc?Ad$mD}LHIv+(}BRe)D zmn(Hqy@`~eEmPX)duy@5C}0{~L*Dks@J`*?iNH@gH(?{>lP6ESgaiXGloUeVzdezH zO9eJy;&-CqHxZl*Deq}XJ~^_V7II=Kis+1o06RS>tMr{QYoh}E17!g ztRQ>kUnyHBe`F5qF;xjCr6~>IiE}$PLf&rhSAZzc&OzGo-_U=mANU>r7i9VX2x7LW zopA3@t$7c()2)6tLjLf>4_|%t)u*3+`oRYuy!F;w89-m?KlM%hFKuxAS3|L(XU@xe rgJ69>%In+H&=0rLaU`. + +.. toctree:: + :maxdepth: 1 + + introduction + installing + tutorial + understanding_sessions + working_with_entities + querying + handling_events + caching + locations/index + example/index + api_reference/index + event_list + environment_variables + security_and_authentication + release/index + glossary + +****************** +Indices and tables +****************** + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst new file mode 100644 index 00000000000..5e42621bee5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst @@ -0,0 +1,77 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _installing: + +********** +Installing +********** + +.. highlight:: bash + +Installation is simple with `pip `_:: + + pip install ftrack-python-api + +Building from source +==================== + +You can also build manually from the source for more control. 
First obtain a +copy of the source by either downloading the +`zipball `_ or +cloning the public repository:: + + git clone git@bitbucket.org:ftrack/ftrack-python-api.git + +Then you can build and install the package into your current Python +site-packages folder:: + + python setup.py install + +Alternatively, just build locally and manage yourself:: + + python setup.py build + +Building documentation from source +---------------------------------- + +To build the documentation from source:: + + python setup.py build_sphinx + +Then view in your browser:: + + file:///path/to/ftrack-python-api/build/doc/html/index.html + +Running tests against the source +-------------------------------- + +With a copy of the source it is also possible to run the unit tests:: + + python setup.py test + +Dependencies +============ + +* `ftrack server `_ >= 3.3.11 +* `Python `_ >= 2.7, < 3 +* `Requests `_ >= 2, <3, +* `Arrow `_ >= 0.4.4, < 1, +* `termcolor `_ >= 1.1.0, < 2, +* `pyparsing `_ >= 2.0, < 3, +* `Clique `_ >= 1.2.0, < 2, +* `websocket-client `_ >= 0.40.0, < 1 + +Additional For building +----------------------- + +* `Sphinx `_ >= 1.2.2, < 2 +* `sphinx_rtd_theme `_ >= 0.1.6, < 1 +* `Lowdown `_ >= 0.1.0, < 2 + +Additional For testing +---------------------- + +* `Pytest `_ >= 2.3.5, < 3 +* `pytest-mock `_ >= 0.4, < 1, +* `pytest-catchlog `_ >= 1, <=2 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst new file mode 100644 index 00000000000..63fe980749c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst @@ -0,0 +1,26 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _introduction: + +************ +Introduction +************ + +This API allows developers to write :term:`Python` scripts that talk directly +with an ftrack server. 
The scripts can perform operations against that server +depending on granted permissions. + +With any API it is important to find the right balance between flexibility and +usefulness. If an API is too low level then everyone ends up writing boilerplate +code for common problems and usually in an non-uniform way making it harder to +share scripts with others. It's also harder to get started with such an API. +Conversely, an API that attempts to be too smart can often become restrictive +when trying to do more advanced functionality or optimise for performance. + +With this API we have tried to strike the right balance between these two, +providing an API that should be simple to use out-of-the-box, but also expose +more flexibility and power when needed. + +Nothing is perfect though, so please do provide feedback on ways that we can +continue to improve this API for your specific needs. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst new file mode 100644 index 00000000000..97483221aae --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst @@ -0,0 +1,87 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _locations/configuring: + +********************* +Configuring locations +********************* + +To allow management of data by a location or retrieval of filesystem paths where +supported, a location instance needs to be configured in a session with an +:term:`accessor` and :term:`structure`. + +.. note:: + + The standard builtin locations require no further setup or configuration + and it is not necessary to read the rest of this section to use them. + +Before continuing, make sure that you are familiar with the general concepts +of locations by reading the :ref:`locations/overview`. + +.. 
_locations/configuring/manually: + +Configuring manually +==================== + +Locations can be configured manually when using a session by retrieving the +location and setting the appropriate attributes:: + + location = session.query('Location where name is "my.location"').one() + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 50 + +.. _locations/configuring/automatically: + +Configuring automatically +========================= + +Often the configuration of locations should be determined by developers +looking after the core pipeline and so ftrack provides a way for a plugin to +be registered to configure the necessary locations for each session. This can +then be managed centrally if desired. + +The configuration is handled through the standard events system via a topic +*ftrack.api.session.configure-location*. Set up an :ref:`event listener plugin +` as normal with a register function that +accepts a :class:`~ftrack_api.session.Session` instance. Then register a +callback against the relevant topic to configure locations at the appropriate +time:: + + import ftrack_api + import ftrack_api.entity.location + import ftrack_api.accessor.disk + import ftrack_api.structure.id + + + def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + + # Find location(s) and customise instances. + location = session.query('Location where name is "my.location"').one() + ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 50 + + + def register(session): + '''Register plugin with *session*.''' + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) + +.. 
note:: + + If you expect the plugin to also be evaluated by the legacy API, remember + to :ref:`validate the arguments `. + +So long as the directory containing the plugin exists on your +:envvar:`FTRACK_EVENT_PLUGIN_PATH`, the plugin will run for each session +created and any configured locations will then remain configured for the +duration of that related session. + +Be aware that you can configure many locations in one plugin or have separate +plugins for different locations - the choice is entirely up to you! diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst new file mode 100644 index 00000000000..ac1eaba6494 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst @@ -0,0 +1,18 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _developing/locations: + +********* +Locations +********* + +Learn how to access locations using the API and configure your own location +plugins. + +.. toctree:: + :maxdepth: 1 + + overview + tutorial + configuring diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst new file mode 100644 index 00000000000..0a6ec171aa1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst @@ -0,0 +1,143 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _locations/overview: + +******** +Overview +******** + +Locations provides a way to easily track and manage data (files, image sequences +etc.) using ftrack. 
+
+With locations it is possible to see where published data is in the world and
+also to transfer data automatically between different locations, even different
+storage mechanisms, by defining a few simple :term:`Python` plugins. By keeping
+track of the size of the data it also helps manage storage capacity better. In
+addition, the intrinsic links to production information make assigning work to
+others and transferring only the relevant data much simpler as well as greatly
+reducing the burden on those responsible for archiving finished work.
+
+Concepts
+========
+
+The system is implemented in layers using a few key concepts in order to provide
+a balance between out of the box functionality and custom configuration.
+
+.. _locations/overview/locations:
+
+Locations
+---------
+
+Data locations can be varied in scope and meaning - a facility, a laptop, a
+specific drive. As such, rather than place a hard limit on what can be
+considered a location, ftrack simply requires that a location be identifiable by
+a string and that string be unique to that location.
+
+A global company with facilities in many different parts of the world might
+follow a location naming convention similar to the following:
+
+    * 'ftrack.london.server01'
+    * 'ftrack.london.server02'
+    * 'ftrack.nyc.server01'
+    * 'ftrack.amsterdam.server01'
+    * '..'
+
+Whereas, for a looser setup, the following might suit better:
+
+    * 'bjorns-workstation'
+    * 'fredriks-mobile'
+    * 'martins-laptop'
+    * 'cloud-backup'
+
+Availability
+------------
+
+When tracking data across several locations it is important to be able to
+quickly find out where data is available and where it is not. As such, ftrack
+provides simple mechanisms for retrieving information on the availability of a
+:term:`component` in each location.
+
+For a single file, the availability will be either 0% or 100%.
For containers, +such as file sequences, each file is tracked separately and the availability of +the container calculated as an overall percentage (e.g. 47%). + +.. _locations/overview/accessors: + +Accessors +--------- + +Due to the flexibility of what can be considered a location, the system must be +able to cope with locations that represent different ways of storing data. For +example, data might be stored on a local hard drive, a cloud service or even in +a database. + +In addition, the method of accessing that storage can change depending on +perspective - local filesystem, FTP, S3 API etc. + +To handle this, ftrack introduces the idea of an :term:`accessor` that provides +access to the data in a standard way. An accessor is implemented in +:term:`Python` following a set interface and can be configured at runtime to +provide relevant access to a location. + +With an accessor configured for a location, it becomes possible to not only +track data, but also manage it through ftrack by using the accessor to add and +remove data from the location. + +At present, ftrack includes a :py:class:`disk accessor +` for local filesystem access. More will be +added over time and developers are encouraged to contribute their own. + +.. _locations/overview/structure: + +Structure +--------- + +Another important consideration for locations is how data should be structured +in the location (folder structure and naming conventions). For example, +different facilities may want to use different folder structures, or different +storage mechanisms may use different paths for the data. + +For this, ftrack supports the use of a :term:`Python` structure plugin. This +plugin is called when adding a :term:`component` to a location in order to +determine the correct structure to use. + +.. note:: + + A structure plugin accepts an ftrack entity as its input and so can be + reused for generating general structures as well. 
For example, an action + callback could be implemented to create the base folder structure for some + selected shots by reusing a structure plugin. + +.. _locations/overview/resource_identifiers: + +Resource identifiers +-------------------- + +When a :term:`component` can be linked to multiple locations it becomes +necessary to store information about the relationship on the link rather than +directly on the :term:`component` itself. The most important information is the +path to the data in that location. + +However, as seen above, not all locations may be filesystem based or accessed +using standard filesystem protocols. For this reason, and to help avoid +confusion, this *path* is referred to as a :term:`resource identifier` and no +limitations are placed on the format. Keep in mind though that accessors use +this information (retrieved from the database) in order to work out how to +access the data, so the format used must be compatible with all the accessors +used for any one location. For this reason, most +:term:`resource identifiers ` should ideally look like +relative filesystem paths. + +.. _locations/overview/resource_identifiers/transformer: + +Transformer +^^^^^^^^^^^ + +To further support custom formats for +:term:`resource identifiers `, it is also possible to +configure a resource identifier transformer plugin which will convert +the identifiers before they are stored centrally and after they are retrieved. + +A possible use case of this might be to store JSON encoded metadata about a path +in the database and convert this to an actual filesystem path on retrieval. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst new file mode 100644 index 00000000000..4c5a6c0f136 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst @@ -0,0 +1,193 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _locations/tutorial: + +******** +Tutorial +******** + +This tutorial is a walkthrough on how you interact with Locations using the +ftrack :term:`API`. Before you read this tutorial, make sure you familiarize +yourself with the location concepts by reading the :ref:`locations/overview`. + +All examples assume you are using Python 2.x, have the :mod:`ftrack_api` +module imported and a :class:`session ` created. + +.. code-block:: python + + import ftrack_api + session = ftrack_api.Session() + +.. _locations/creating-locations: + +Creating locations +================== + +Locations can be created just like any other entity using +:meth:`Session.create `:: + + location = session.create('Location', dict(name='my.location')) + session.commit() + +.. note:: + Location names beginning with ``ftrack.`` are reserved for internal use. Do + not use this prefix for your location names. + +To create a location only if it doesn't already exist use the convenience +method :meth:`Session.ensure `. This will return +either an existing matching location or a newly created one. + +Retrieving locations +==================== + +You can retrieve existing locations using the standard session +:meth:`~ftrack_api.session.Session.get` and +:meth:`~ftrack_api.session.Session.query` methods:: + + # Retrieve location by unique id. + location_by_id = session.get('Location', 'unique-id') + + # Retrieve location by name. 
+ location_by_name = session.query( + 'Location where name is "my.location"' + ).one() + +To retrieve all existing locations use a standard query:: + + all_locations = session.query('Location').all() + for existing_location in all_locations: + print existing_location['name'] + +Configuring locations +===================== + +At this point you have created a custom location "my.location" in the database +and have an instance to reflect that. However, the location cannot be used in +this session to manage data unless it has been configured. To configure a +location for the session, set the appropriate attributes for accessor and +structure:: + + import tempfile + import ftrack_api.accessor.disk + import ftrack_api.structure.id + + # Assign a disk accessor with *temporary* storage + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=tempfile.mkdtemp() + ) + + # Assign using ID structure. + location.structure = ftrack_api.structure.id.IdStructure() + + # Set a priority which will be used when automatically picking locations. + # Lower number is higher priority. + location.priority = 30 + +To learn more about how to configure locations automatically in a session, see +:ref:`locations/configuring`. + +.. note:: + + If a location is not configured in a session it can still be used as a + standard entity and to find out availability of components + +Using components with locations +=============================== + +The Locations :term:`API` tries to use sane defaults to stay out of your way. 
+When creating :term:`components `, a location is automatically picked +using :meth:`Session.pick_location `:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_a = session.create_component(path=component_path) + +To override, specify a location explicitly:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_b = session.create_component( + path=component_path, location=location + ) + +If you set the location to ``None``, the component will only be present in the +special origin location for the duration of the session:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_c = session.create_component(path=component_path, location=None) + +After creating a :term:`component` in a location, it can be added to another +location by calling :meth:`Location.add_component +` and passing the location to +use as the *source* location:: + + origin_location = session.query( + 'Location where name is "ftrack.origin"' + ).one() + location.add_component(component_c, origin_location) + +To remove a component from a location use :meth:`Location.remove_component +`:: + + location.remove_component(component_b) + +Each location specifies whether to automatically manage data when adding or +removing components. To ensure that a location does not manage data, mixin the +relevant location mixin class before use:: + + import ftrack_api + import ftrack_api.entity.location + + ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + +Accessing paths +=============== + +The locations system is designed to help avoid having to deal with filesystem +paths directly. This is particularly important when you consider that a number +of locations won't provide any direct filesystem access (such as cloud storage). + +However, it is useful to still be able to get a filesystem path from locations +that support them (typically those configured with a +:class:`~ftrack_api.accessor.disk.DiskAccessor`). 
For example, you might need to +pass a filesystem path to another application or perform a copy using a faster +protocol. + +To retrieve the path if available, use :meth:`Location.get_filesystem_path +`:: + + print location.get_filesystem_path(component_c) + +Obtaining component availability +================================ + +Components in locations have a notion of availability. For regular components, +consisting of a single file, the availability would be either 0 if the +component is unavailable or 100 percent if the component is available in the +location. Composite components, like image sequences, have an availability +which is proportional to the amount of child components that have been added to +the location. + +For example, an image sequence might currently be in a state of being +transferred to :data:`test.location`. If half of the images are transferred, it +might be possible to start working with the sequence. To check availability use +the helper :meth:`Session.get_component_availability +` method:: + + print session.get_component_availability(component_c) + +There are also convenience methods on both :meth:`components +` and :meth:`locations +` for +retrieving availability as well:: + + print component_c.get_availability() + print location.get_component_availability(component_c) + +Location events +=============== + +If you want to receive event notifications when components are added to or +removed from locations, you can subscribe to the topics published, +:data:`ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC` or +:data:`ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC` and the callback +you want to be run. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst new file mode 100644 index 00000000000..7a200529ab1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst @@ -0,0 +1,263 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _querying: + +******** +Querying +******** + +.. currentmodule:: ftrack_api.session + +The API provides a simple, but powerful query language in addition to iterating +directly over entity attributes. Using queries can often substantially speed +up your code as well as reduce the amount of code written. + +A query is issued using :meth:`Session.query` and returns a list of matching +entities. The query always has a single *target* entity type that the query +is built against. This means that you cannot currently retrieve back a list of +different entity types in one query, though using :ref:`projections +` does allow retrieving related entities of a different +type in one go. + +The syntax for a query is: + +.. code-block:: none + + select from where + +However, both the selection of projections and criteria are optional. This means +the most basic query is just to fetch all entities of a particular type, such as +all projects in the system:: + + projects = session.query('Project') + +A query always returns a :class:`~ftrack_api.query.QueryResult` instance that +acts like a list with some special behaviour. The main special behaviour is that +the actual query to the server is not issued until you iterate or index into the +query results:: + + for project in projects: + print project['name'] + +You can also explicitly call :meth:`~ftrack_api.query.QueryResult.all` on the +result set:: + + projects = session.query('Project').all() + +.. note:: + + This behaviour exists in order to make way for efficient *paging* and other + optimisations in future. 
+ +.. _querying/criteria: + +Using criteria to narrow results +================================ + +Often you will have some idea of the entities you want to retrieve. In this +case you can optimise your code by not fetching more data than you need. To do +this, add criteria to your query:: + + projects = session.query('Project where status is active') + +Each criteria follows the form: + +.. code-block:: none + + + +You can inspect the entity type or instance to find out which :ref:`attributes +` are available to filter on for a particular +entity type. The list of :ref:`operators ` that can +be applied and the types of values they expect is listed later on. + +.. _querying/criteria/combining: + +Combining criteria +------------------ + +Multiple criteria can be applied in a single expression by joining them with +either ``and`` or ``or``:: + + projects = session.query( + 'Project where status is active and name like "%thrones"' + ) + +You can use parenthesis to control the precedence when compound criteria are +used (by default ``and`` takes precedence):: + + projects = session.query( + 'Project where status is active and ' + '(name like "%thrones" or full_name like "%thrones")' + ) + +.. _querying/criteria/relationships: + +Filtering on relationships +-------------------------- + +Filtering on relationships is also intuitively supported. 
Simply follow the +relationship using a dotted notation:: + + tasks_in_project = session.query( + 'Task where project.id is "{0}"'.format(project['id']) + ) + +This works even for multiple strides across relationships (though do note that +excessive strides can affect performance):: + + tasks_completed_in_project = session.query( + 'Task where project.id is "{0}" and ' + 'status.type.name is "Done"' + .format(project['id']) + ) + +The same works for collections (where each entity in the collection is compared +against the subsequent condition):: + + import arrow + + tasks_with_time_logged_today = session.query( + 'Task where timelogs.start >= "{0}"'.format(arrow.now().floor('day')) + ) + +In the above query, each *Task* that has at least one *Timelog* with a *start* +time greater than the start of today is returned. + +When filtering on relationships, the conjunctions ``has`` and ``any`` can be +used to specify how the criteria should be applied. This becomes important when +querying using multiple conditions on collection relationships. The relationship +condition can be written against the following form:: + + () + +For optimal performance ``has`` should be used for scalar relationships when +multiple conditions are involved. For example, to find notes by a specific +author when only name is known:: + + notes_written_by_jane_doe = session.query( + 'Note where author has (first_name is "Jane" and last_name is "Doe")' + ) + +This query could be written without ``has``, giving the same results:: + + notes_written_by_jane_doe = session.query( + 'Note where author.first_name is "Jane" and author.last_name is "Doe"' + ) + +``any`` should be used for collection relationships. 
For example, to find all +projects that have at least one metadata instance that has `key=some_key` +and `value=some_value` the query would be:: + + projects_where_some_key_is_some_value = session.query( + 'Project where metadata any (key=some_key and value=some_value)' + ) + +If the query was written without ``any``, projects with one metadata matching +*key* and another matching the *value* would be returned. + +``any`` can also be used to query for empty relationship collections:: + + users_without_timelogs = session.query( + 'User where not timelogs any ()' + ) + +.. _querying/criteria/operators: + +Supported operators +------------------- + +This is the list of currently supported operators: + ++--------------+----------------+----------------------------------------------+ +| Operators | Description | Example | ++==============+================+==============================================+ +| = | Exactly equal. | name is "martin" | +| is | | | ++--------------+----------------+----------------------------------------------+ +| != | Not exactly | name is_not "martin" | +| is_not | equal. | | ++--------------+----------------+----------------------------------------------+ +| > | Greater than | start after "2015-06-01" | +| after | exclusive. | | +| greater_than | | | ++--------------+----------------+----------------------------------------------+ +| < | Less than | end before "2015-06-01" | +| before | exclusive. | | +| less_than | | | ++--------------+----------------+----------------------------------------------+ +| >= | Greater than | bid >= 10 | +| | inclusive. | | ++--------------+----------------+----------------------------------------------+ +| <= | Less than | bid <= 10 | +| | inclusive. | | ++--------------+----------------+----------------------------------------------+ +| in | One of. | status.type.name in ("In Progress", "Done") | ++--------------+----------------+----------------------------------------------+ +| not_in | Not one of. 
| status.name not_in ("Omitted", "On Hold") | ++--------------+----------------+----------------------------------------------+ +| like | Matches | name like "%thrones" | +| | pattern. | | ++--------------+----------------+----------------------------------------------+ +| not_like | Does not match | name not_like "%thrones" | +| | pattern. | | ++--------------+----------------+----------------------------------------------+ +| has | Test scalar | author has (first_name is "Jane" and | +| | relationship. | last_name is "Doe") | ++--------------+----------------+----------------------------------------------+ +| any | Test collection| metadata any (key=some_key and | +| | relationship. | value=some_value) | ++--------------+----------------+----------------------------------------------+ + +.. _querying/projections: + +Optimising using projections +============================ + +In :ref:`understanding_sessions` we mentioned :ref:`auto-population +` of attribute values on access. This +meant that when iterating over a lot of entities and attributes a large number +of queries were being sent to the server. Ultimately, this can cause your code +to run slowly:: + + >>> projects = session.query('Project') + >>> for project in projects: + ... print( + ... # Multiple queries issued here for each attribute accessed for + ... # each project in the loop! + ... '{project[full_name]} - {project[status][name]})' + ... .format(project=project) + ... ) + + +Fortunately, there is an easy way to optimise. If you know what attributes you +are interested in ahead of time you can include them in your query string as +*projections* in order to fetch them in one go:: + + >>> projects = session.query( + ... 'select full_name, status.name from Project' + ... ) + >>> for project in projects: + ... print( + ... # No additional queries issued here as the values were already + ... # loaded by the above query! + ... '{project[full_name]} - {project[status][name]})' + ... 
.format(project=project) + ... ) + +Notice how this works for related entities as well. In the example above, we +also fetched the name of each *Status* entity attached to a project in the same +query, which meant that no further queries had to be issued when accessing those +nested attributes. + +.. note:: + + There are no arbitrary limits to the number (or depth) of projections, but + do be aware that excessive projections can ultimately result in poor + performance also. As always, it is about choosing the right tool for the + job. + +You can also customise the +:ref:`working_with_entities/entity_types/default_projections` to use for each +entity type when none are specified in the query string. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst new file mode 100644 index 00000000000..0eef0b7407a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst @@ -0,0 +1,18 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _release: + +*************************** +Release and migration notes +*************************** + +Find out information about what has changed between versions and any important +migration notes to be aware of when switching to a new version. + +.. toctree:: + :maxdepth: 1 + + release_notes + migration + migrating_from_old_api diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst new file mode 100644 index 00000000000..699ccf224a7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst @@ -0,0 +1,613 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. 
_release/migrating_from_old_api: + +********************** +Migrating from old API +********************** + +.. currentmodule:: ftrack_api.session + +Why a new API? +============== + +With the introduction of Workflows, ftrack is capable of supporting a greater +diversity of industries. We're enabling teams to closely align the system with +their existing practices and naming conventions, resulting in a tool that feels +more natural and intuitive. The old API was locked to specific workflows, making +it impractical to support this new feature naturally. + +We also wanted this new flexibility to extend to developers, so we set about +redesigning the API to fully leverage the power in the system. And while we had +the wrenches out, we figured why not go that extra mile and build in some of the +features that we see developers having to continually implement in-house across +different companies - features such as caching and support for custom pipeline +extensions. In essence, we decided to build the API that, as pipeline +developers, we had always wanted from our production tracking and asset +management systems. We think we succeeded, and we hope you agree. + +Installing +========== + +Before, you used to download the API package from your ftrack instance. With +each release of the new API we make it available on :term:`PyPi`, and +installing is super simple: + +.. code-block:: none + + pip install ftrack-python-api + +Before installing, it is always good to check the latest +:ref:`release/release_notes` to see which version of the ftrack server is +required. + +.. seealso:: :ref:`installing` + +Overview +======== + +An API needs to be approachable, so we built the new API to feel +intuitive and familiar. 
We bundle all the core functionality into one place – a +session – with consistent methods for interacting with entities in the system:: + + import ftrack_api + session = ftrack_api.Session() + +The session is responsible for loading plugins and communicating with the ftrack +server and allows you to use multiple simultaneous sessions. You will no longer +need to explicitly call :meth:`ftrack.setup` to load plugins. + +The core methods are straightforward: + +Session.create + create a new entity, like a new version. +Session.query + fetch entities from the server using a powerful query language. +Session.delete + delete existing entities. +Session.commit + commit all changes in one efficient call. + +.. note:: + + The new API batches create, update and delete operations by default for + efficiency. To synchronise local changes with the server you need to call + :meth:`Session.commit`. + +In addition all entities in the API now act like simple Python dictionaries, +with some additional helper methods where appropriate. If you know a little +Python (or even if you don't) getting up to speed should be a breeze:: + + >>> print user.keys() + ['first_name', 'last_name', 'email', ...] + >>> print user['email'] + 'old@example.com' + >>> user['email'] = 'new@example.com' + +And of course, relationships between entities are reflected in a natural way as +well:: + + new_timelog = session.create('Timelog', {...}) + task['timelogs'].append(new_timelog) + +.. seealso :: :ref:`tutorial` + +The new API also makes use of caching in order to provide more efficient +retrieval of data by reducing the number of calls to the remote server. + +.. seealso:: :ref:`caching` + +Open source and standard code style +=================================== + +The new API is open source software and developed in public at +`Bitbucket `_. We welcome you +to join us in the development and create pull requests there. + +In the new API, we also follow the standard code style for Python, +:term:`PEP-8`. 
This means that you will now find that methods and variables are
+written using ``snake_case`` instead of ``camelCase``, amongst other things.
+
+Package name
+============
+
+The new package is named :mod:`ftrack_api`. By using a new package name, we
+enable you to use the old API and the new side-by-side in the same process.
+
+Old API::
+
+    import ftrack
+
+New API::
+
+    import ftrack_api
+
+Specifying your credentials
+===========================
+
+The old API used three environment variables to authenticate with your ftrack
+instance. While these continue to work as before, you now also have
+the option to specify them when initializing the session::
+
+    >>> import ftrack_api
+    >>> session = ftrack_api.Session(
+    ...     server_url='https://mycompany.ftrackapp.com',
+    ...     api_key='7545384e-a653-11e1-a82c-f22c11dd25eq',
+    ...     api_user='martin'
+    ... )
+
+In the examples below, we will assume that you have imported the package and
+created a session.
+
+.. seealso::
+
+    * :ref:`environment_variables`
+    * :ref:`tutorial`
+
+
+Querying objects
+================
+
+The old API relied on predefined methods for querying objects and constructors
+which enabled you to get an entity by its id or name.
+ +Old API:: + + project = ftrack.getProject('dev_tutorial') + task = ftrack.Task('8923b7b3-4bf0-11e5-8811-3c0754289fd3') + user = ftrack.User('jane') + +New API:: + + project = session.query('Project where name is "dev_tutorial"').one() + task = session.get('Task', '8923b7b3-4bf0-11e5-8811-3c0754289fd3') + user = session.query('User where username is "jane"').one() + +While the new API can be a bit more verbose for simple queries, it is much more +powerful and allows you to filter on any field and preload related data:: + + tasks = session.query( + 'select name, parent.name from Task ' + 'where project.full_name is "My Project" ' + 'and status.type.short is "DONE" ' + 'and not timelogs any ()' + ).all() + +The above fetches all tasks for “My Project” that are done but have no timelogs. +It also pre-fetches related information about the tasks parent – all in one +efficient query. + +.. seealso:: :ref:`querying` + +Creating objects +================ + +In the old API, you create objects using specialized methods, such as +:meth:`ftrack.createProject`, :meth:`Project.createSequence` and +:meth:`Task.createShot`. + +In the new API, you can create any object using :meth:`Session.create`. In +addition, there are a few helper methods to reduce the amount of boilerplate +necessary to create certain objects. Don't forget to call :meth:`Session.commit` +once you have issued your create statements to commit your changes. + +As an example, let's look at populating a project with a few entities. 
+ +Old API:: + + project = ftrack.getProject('migration_test') + + # Get default task type and status from project schema + taskType = project.getTaskTypes()[0] + taskStatus = project.getTaskStatuses(taskType)[0] + + sequence = project.createSequence('001') + + # Create five shots with one task each + for shot_number in xrange(10, 60, 10): + shot = sequence.createShot( + '{0:03d}'.format(shot_number) + ) + shot.createTask( + 'Task name', + taskType, + taskStatus + ) + + +New API:: + + project = session.query('Project where name is "migration_test"').one() + + # Get default task type and status from project schema + project_schema = project['project_schema'] + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + # Create sequence + sequence = session.create('Sequence', { + 'name': '001', + 'parent': project + }) + + # Create five shots with one task each + for shot_number in xrange(10, 60, 10): + shot = session.create('Shot', { + 'name': '{0:03d}'.format(shot_number), + 'parent': sequence, + 'status': default_shot_status + }) + session.create('Task', { + 'name': 'Task name', + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + # Commit all changes to the server. + session.commit() + +If you test the example above, one thing you might notice is that the new API +is much more efficient. Thanks to the transaction-based architecture in the new +API only a single call to the server is required to create all the objects. + +.. seealso:: :ref:`working_with_entities/creating` + +Updating objects +================ + +Updating objects in the new API works in a similar way to the old API. Instead +of using the :meth:`set` method on objects, you simply set the key of the +entity to the new value, and call :meth:`Session.commit` to persist the +changes to the database. 
In the old API, date and datetime attributes were represented using a standard
:mod:`datetime` object. In the new API we have opted to use the :term:`arrow`
library instead. Datetime attributes are represented in the server timezone,
but with the timezone information stripped.
+ +Old API:: + + >>> task_old_api = ftrack.Task(task_id) + >>> task_old_api.get('my_custom_attribute') + + >>> task_old_api.set('my_custom_attribute', 'My new value') + + +New API:: + + >>> task_new_api = session.get('Task', task_id) + >>> task_new_api['custom_attributes']['my_custom_attribute'] + + + >>> task_new_api['custom_attributes']['my_custom_attribute'] = 'My new value' + +For more information on working with custom attributes and existing +limitations, please see: + +.. seealso:: + + :ref:`example/custom_attribute` + + +Using both APIs side-by-side +============================ + +With so many powerful new features and the necessary support for more flexible +workflows, we chose early on to not limit the new API design by necessitating +backwards compatibility. However, we also didn't want to force teams using the +existing API to make a costly all-or-nothing switchover. As such, we have made +the new API capable of coexisting in the same process as the old API:: + + import ftrack + import ftrack_api + +In addition, the old API will continue to be supported for some time, but do +note that it will not support the new `Workflows +`_ and will not have new features back ported +to it. + +In the first example, we obtain a task reference using the old API and +then use the new API to assign a user to it:: + + import ftrack + import ftrack_api + + # Create session for new API, authenticating using envvars. 
If the input parameter is not validated, a plugin might be mistakenly
registered twice, since both the new and old API will look for plugins in the
same directories.
+ +Old API:: + + # Query a shot and a task to create the asset against. + shot = ftrack.getShot(['dev_tutorial', '001', '010']) + task = shot.getTasks()[0] + + # Create new asset. + asset = shot.createAsset(name='forest', assetType='geo') + + # Create a new version for the asset. + version = asset.createVersion( + comment='Added more leaves.', + taskid=task.getId() + ) + + # Get the calculated version number. + print version.getVersion() + + # Add some components. + previewPath = '/path/to/forest_preview.mov' + previewComponent = version.createComponent(path=previewPath) + + modelPath = '/path/to/forest_mode.ma' + modelComponent = version.createComponent(name='model', path=modelPath) + + # Publish. + asset.publish() + + # Add thumbnail to version. + thumbnail = version.createThumbnail('/path/to/forest_thumbnail.jpg') + + # Set thumbnail on other objects without duplicating it. + task.setThumbnail(thumbnail) + +New API:: + + # Query a shot and a task to create the asset against. + shot = session.query( + 'Shot where project.name is "dev_tutorial" ' + 'and parent.name is "001" and name is "010"' + ).one() + task = shot['children'][0] + + # Create new asset. + asset_type = session.query('AssetType where short is "geo"').first() + asset = session.create('Asset', { + 'parent': shot, + 'name': 'forest', + 'type': asset_type + }) + + # Create a new version for the asset. + status = session.query('Status where name is "Pending"').one() + version = session.create('AssetVersion', { + 'asset': asset, + 'status': status, + 'comment': 'Added more leaves.', + 'task': task + }) + + # In the new API, the version number is not set until we persist the changes + print 'Version number before commit: {0}'.format(version['version']) + session.commit() + print 'Version number after commit: {0}'.format(version['version']) + + # Add some components. 
Note that link includes the task itself so `[:-1]` is used to only retrieve
the parents. To learn more about the `link` attribute, see
:ref:`Using link attributes example`.
+ +Limitations in the current version of the API +============================================= + +The new API is still quite young and in active development and there are a few +limitations currently to keep in mind when using it. + +Missing schemas +--------------- + +The following entities are as of the time of writing not currently available +in the new API. Let us know if you depend on any of them. + + * Booking + * Calendar and Calendar Type + * Dependency + * Manager and Manager Type + * Phase + * Role + * Task template + * Temp data + +Action base class +----------------- +There is currently no helper class for creating actions using the new API. We +will add one in the near future. + +In the meantime, it is still possible to create actions without the base class +by listening and responding to the +:ref:`ftrack:developing/events/list/ftrack.action.discover` and +:ref:`ftrack:developing/events/list/ftrack.action.launch` events. + +Legacy location +--------------- + +The ftrack legacy disk locations utilizing the +:class:`InternalResourceIdentifierTransformer` has been deprecated. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst new file mode 100644 index 00000000000..1df2211f96c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst @@ -0,0 +1,98 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _release/migration: + +*************** +Migration notes +*************** + +.. note:: + + Migrating from the old ftrack API? Read the dedicated :ref:`guide + `. + +Migrate to upcoming 2.0.0 +========================= + +.. 
The default behavior of the `ftrack_api.Session` class will change
for the argument `auto_connect_event_hub`: the default value will
switch from True to False. In order for code relying on the event hub
to continue functioning as expected, you must modify your code to
explicitly set the argument to True or manually call
`session.event_hub.connect()`.
_release/migration/0.2.0/new_api_name: + +New API name +------------ + +In this release the API has been renamed from `ftrack` to `ftrack_api`. This is +to allow both the old and new API to co-exist in the same environment without +confusion. + +As such, any scripts using this new API need to be updated to import +`ftrack_api` instead of `ftrack`. For example: + +**Previously**:: + + import ftrack + import ftrack.formatter + ... + +**Now**:: + + import ftrack_api + import ftrack_api.formatter + ... diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst new file mode 100644 index 00000000000..d7978ac0b86 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst @@ -0,0 +1,1478 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _release/release_notes: + +************* +Release Notes +************* + +.. currentmodule:: ftrack_api.session + +.. release:: 1.8.2 + :date: 2020-01-14 + + .. change:: fixed + :tag: Test + + test_ensure_entity_with_non_string_data_types test fails due to missing parents. + + .. change:: changed + :tags: session + + Use WeakMethod when registering atexit handler to prevent memory leak. + +.. release:: 1.8.1 + :date: 2019-10-30 + + .. change:: changed + :tags: Location + + Increase chunk size for file operations to 1 Megabyte. + This value can now also be set from the environment variable: + + :envvar:`FTRACK_API_FILE_CHUNK_SIZE` + + .. change:: new + :tag: setup + + Add check for correct python version when installing with pip. + + .. change:: new + :tags: Notes + + Add support for note labels in create_note helper method. + + .. change:: changed + :tags: session + + Ensure errors from server are fully reported with stack trace. + +.. release:: 1.8.0 + :date: 2019-02-21 + + .. 
        Add support for list of `computed` attributes as part of schema
        definition. A computed attribute is derived on the server side, and can
        be time dependent and differ between users. As such a computed
        attribute is not suitable for long term encoding and will not be encoded
        with the `persisted_only` strategy.
        New function :meth:`ftrack_api.session.Session.reset_remote` allows
        resetting of attributes to their default value. A convenience method
        for resetting a user's api key utilizing this was also added
        :meth:`ftrack_api.entity.user.User.reset_api_key`.
+ + .. change:: changed + :tags: cache, performance + + Entities fetched from cache are now lazily merged. Improved + performance when dealing with highly populated caches. + +.. release:: 1.3.3 + :date: 2017-11-16 + + + .. change:: new + :tags: users, ldap + + Add support for triggering a synchronization of + users between ldap and ftrack. See :ref:`example/sync_with_ldap` + for example usage. + + .. note:: + + This requires that you run ftrack 3.5.10 or later. + + .. change:: fixed + :tags: metadata + + Not possible to set metadata on creation. + +.. release:: 1.3.2 + :date: 2017-09-18 + + + .. change:: new + :tags: task template + + Added example for managing task templates through the API. See + :ref:`example/task_template` for example usage. + + .. change:: fixed + :tags: custom attributes + + Not possible to set hierarchical custom attributes on an entity that + has not been committed. + + .. change:: fixed + :tags: custom attributes + + Not possible to set custom attributes on an `Asset` that has not been + committed. + + .. change:: fixed + :tags: metadata + + Not possible to set metadata on creation. + +.. release:: 1.3.1 + :date: 2017-07-21 + + .. change:: fixed + :tags: session, events + + Calling disconnect on the event hub is slow. + +.. release:: 1.3.0 + :date: 2017-07-17 + + .. change:: new + :tags: session + + Support using a :class:`Session` as a context manager to aid closing of + session after use:: + + with ftrack_api.Session() as session: + # Perform operations with session. + + .. change:: new + :tags: session + + :meth:`Session.close` automatically called on Python exit if session not + already closed. + + .. change:: new + :tags: session + + Added :meth:`Session.close` to properly close a session's connections to + the server(s) as well as ensure event listeners are properly + unsubscribed. + + .. 
change:: new + + Added :exc:`ftrack_api.exception.ConnectionClosedError` to represent + error caused when trying to access servers over closed connection. + +.. release:: 1.2.0 + :date: 2017-06-16 + + .. change:: changed + :tags: events + + Updated the websocket-client dependency to version >= 0.40.0 to allow + for http proxies. + + .. change:: fixed + :tags: documentation + + The :ref:`example/publishing` example incorrectly stated that a + location would be automatically picked if the *location* keyword + argument was omitted. + +.. release:: 1.1.1 + :date: 2017-04-27 + + .. change:: fixed + :tags: custom attributes + + Cannot use custom attributes for `Asset` in ftrack versions prior to + `3.5.0`. + + .. change:: fixed + :tags: documentation + + The :ref:`example ` + section for managing `text` custom attributes is not correct. + +.. release:: 1.1.0 + :date: 2017-03-08 + + .. change:: new + :tags: server location, thumbnail + + Added method :meth:`get_thumbnail_url() ` + to server location, which can be used to retrieve a thumbnail URL. + See :ref:`example/thumbnail/url` for example usage. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on how to manage entity + links from the API. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on + how to manage custom attribute configurations from the API. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on how to use + `SecurityRole` and `UserSecurityRole` to manage security roles for + users. + + .. change:: new + :tags: documentation + + Added :ref:`examples ` to show how + to list a user's assigned tasks and all users assigned to a task. + + .. change:: changed + :tags: session, plugins + + Added *plugin_arguments* to :class:`Session` to allow passing of + optional keyword arguments to discovered plugin register functions. 
Only + arguments defined in a plugin register function signature are passed so + existing plugin register functions do not need updating if the new + functionality is not desired. + + .. change:: fixed + :tags: documentation + + The :ref:`example/project` example can be confusing since the project + schema may not contain the necessary object types. + + .. change:: fixed + :tags: documentation + + Query tutorial article gives misleading information about the ``has`` + operator. + + .. change:: fixed + :tags: session + + Size is not set on sequence components when using + :meth:`Session.create_component`. + +.. release:: 1.0.4 + :date: 2017-01-13 + + .. change:: fixed + :tags: custom attributes + + Custom attribute values cannot be set on entities that are not + persisted. + + .. change:: fixed + :tags: events + + `username` in published event's source data is set to the operating + system user and not the API user. + +.. release:: 1.0.3 + :date: 2017-01-04 + + .. change:: changed + :tags: session, custom attributes + + Increased performance of custom attributes and better support for + filtering when using a version of ftrack that supports non-sparse + attribute values. + + .. change:: changed + :tags: session, custom attributes + + Custom attributes can no longer be set by mutating entire dictionary. + + .. seealso:: :ref:`release/migration/1.0.3/mutating_dictionary`. + +.. release:: 1.0.2 + :date: 2016-11-17 + + .. change:: changed + :tags: session + + Removed version restriction for higher server versions. + +.. release:: 1.0.1 + :date: 2016-11-11 + + .. change:: fixed + + :meth:`EventHub.publish ` + *on_reply* callback only called for first received reply. It should be + called for all relevant replies received. + +.. release:: 1.0.0 + :date: 2016-10-28 + + .. change:: new + :tags: session + + :meth:`Session.get_upload_metadata` has been added. + + .. 
change:: changed + :tags: locations, backwards-incompatible + + Data transfer between locations using accessors is now chunked to avoid + reading large files into memory. + + .. seealso:: :ref:`release/migration/1.0.0/chunked_transfer`. + + .. change:: changed + :tags: server accessor + + :class:`ftrack_api.accessor.server.ServerFile` has been refactored to + work with large files more efficiently. + + .. change:: changed + :tags: server accessor + + :class:`ftrack_api.accessor.server.ServerFile` has been updated to use + the get_upload_metadata API endpoint instead of + /component/getPutMetadata. + + .. change:: changed + :tags: locations + + :class:`ftrack_api.data.String` is now using a temporary file instead of + StringIO to avoid reading large files into memory. + + .. change:: fixed + :tags: session, locations + + `ftrack.centralized-storage` does not properly validate location + selection during user configuration. + +.. release:: 0.16.0 + :date: 2016-10-18 + + .. change:: new + :tags: session, encode media + + :meth:`Session.encode_media` can now automatically associate the output + with a version by specifying a *version_id* keyword argument. A new + helper method on versions, :meth:`AssetVersion.encode_media + `, can be + used to make versions playable in a browser. A server version of 3.3.32 + or higher is required for it to function properly. + + .. seealso:: :ref:`example/encode_media`. + + .. change:: changed + :tags: session, encode media + + You can now decide if :meth:`Session.encode_media` should keep or + delete the original component by specifying the *keep_original* + keyword argument. + + .. change:: changed + :tags: backwards-incompatible, collection + + Collection mutation now stores collection instance in operations rather + than underlying data structure. + + .. change:: changed + :tags: performance + + Improve performance of commit operations by optimising encoding and + reducing payload sent to server. + + .. 
change:: fixed + :tags: documentation + + Asset parent variable is declared but never used in + :ref:`example/publishing`. + + .. change:: fixed + :tags: documentation + + Documentation of hierarchical attributes and their limitations are + misleading. See :ref:`example/custom_attribute`. + +.. release:: 0.15.5 + :date: 2016-08-12 + + .. change:: new + :tags: documentation + + Added two new examples for :ref:`example/publishing` and + :ref:`example/web_review`. + + .. change:: fixed + :tags: session, availability + + :meth:`Session.get_component_availabilities` ignores passed locations + shortlist and includes all locations in returned availability mapping. + + .. change:: fixed + :tags: documentation + + Source distribution of ftrack-python-api does not include ftrack.css + in the documentation. + +.. release:: 0.15.4 + :date: 2016-07-12 + + .. change:: fixed + :tags: querying + + Custom offset not respected by + :meth:`QueryResult.first `. + + .. change:: changed + :tags: querying + + Using a custom offset with :meth:`QueryResult.one + ` helper method now raises an + exception as an offset is inappropriate when expecting to select a + single item. + + .. change:: fixed + :tags: caching + + :meth:`LayeredCache.remove ` + incorrectly raises :exc:`~exceptions.KeyError` if key only exists in + sub-layer cache. + +.. release:: 0.15.3 + :date: 2016-06-30 + + .. change:: fixed + :tags: session, caching + + A newly created entity now has the correct + :attr:`ftrack_api.symbol.CREATED` state when checked in caching layer. + Previously the state was :attr:`ftrack_api.symbol.NOT_SET`. Note that + this fix causes a change in logic and the stored + :class:`ftrack_api.operation.CreateEntityOperation` might hold data that + has not been fully :meth:`merged `. + + .. change:: fixed + :tags: documentation + + The second example in the assignments article is not working. + + .. 
change:: changed + :tags: session, caching + + A callable cache maker can now return ``None`` to indicate that it could + not create a suitable cache, but :class:`Session` instantiation can + continue safely. + +.. release:: 0.15.2 + :date: 2016-06-02 + + .. change:: new + :tags: documentation + + Added an example on how to work with assignments and allocations + :ref:`example/assignments_and_allocations`. + + .. change:: new + :tags: documentation + + Added :ref:`example/entity_links` article with + examples of how to manage asset version dependencies. + + .. change:: fixed + :tags: performance + + Improve performance of large collection management. + + .. change:: fixed + + Entities are not hashable because + :meth:`ftrack_api.entity.base.Entity.__hash__` raises `TypeError`. + +.. release:: 0.15.1 + :date: 2016-05-02 + + .. change:: fixed + :tags: collection, attribute, performance + + Custom attribute configurations does not cache necessary keys, leading + to performance issues. + + .. change:: fixed + :tags: locations, structure + + Standard structure does not work if version relation is not set on + the `Component`. + +.. release:: 0.15.0 + :date: 2016-04-04 + + .. change:: new + :tags: session, locations + + `ftrack.centralized-storage` not working properly on Windows. + +.. release:: 0.14.0 + :date: 2016-03-14 + + .. change:: changed + :tags: session, locations + + The `ftrack.centralized-storage` configurator now validates that name, + label and description for new locations are filled in. + + .. change:: new + :tags: session, client review + + Added :meth:`Session.send_review_session_invite` and + :meth:`Session.send_review_session_invites` that can be used to inform + review session invitees about a review session. + + .. seealso:: :ref:`Usage guide `. + + .. change:: new + :tags: session, locations + + Added `ftrack.centralized-storage` configurator as a private module. 
It + implements a wizard like interface used to configure a centralised + storage scenario. + + .. change:: new + :tags: session, locations + + `ftrack.centralized-storage` storage scenario is automatically + configured based on information passed from the server with the + `query_server_information` action. + + .. change:: new + :tags: structure + + Added :class:`ftrack_api.structure.standard.StandardStructure` with + hierarchy based resource identifier generation. + + .. change:: new + :tags: documentation + + Added more information to the :ref:`understanding_sessions/plugins` + article. + + .. change:: fixed + + :meth:`~ftrack_api.entity.user.User.start_timer` arguments *comment* + and *name* are ignored. + + .. change:: fixed + + :meth:`~ftrack_api.entity.user.User.stop_timer` calculates the wrong + duration when the server is not running in UTC. + + For the duration to be calculated correctly ftrack server version + >= 3.3.15 is required. + +.. release:: 0.13.0 + :date: 2016-02-10 + + .. change:: new + :tags: component, thumbnail + + Added improved support for handling thumbnails. + + .. seealso:: :ref:`example/thumbnail`. + + .. change:: new + :tags: session, encode media + + Added :meth:`Session.encode_media` that can be used to encode + media to make it playable in a browser. + + .. seealso:: :ref:`example/encode_media`. + + .. change:: fixed + + :meth:`Session.commit` fails when setting a custom attribute on an asset + version that has been created and committed in the same session. + + .. change:: new + :tags: locations + + Added :meth:`ftrack_api.entity.location.Location.get_url` to retrieve a + URL to a component in a location if supported by the + :class:`ftrack_api.accessor.base.Accessor`. + + .. change:: new + :tags: documentation + + Updated :ref:`example/note` and :ref:`example/job` articles with + examples of how to use note and job components. + + .. 
change:: changed + :tags: logging, performance + + Logged messages now evaluated lazily using + :class:`ftrack_api.logging.LazyLogMessage` as optimisation. + + .. change:: changed + :tags: session, events + + Auto connection of event hub for :class:`Session` now takes place in + background to improve session startup time. + + .. change:: changed + :tags: session, events + + Event hub connection timeout is now 60 seconds instead of 10. + + .. change:: changed + :tags: server version + + ftrack server version >= 3.3.11, < 3.4 required. + + .. change:: changed + :tags: querying, performance + + :class:`ftrack_api.query.QueryResult` now pages internally using a + specified page size in order to optimise record retrieval for large + query results. :meth:`Session.query` has also been updated to allow + passing a custom page size at runtime if desired. + + .. change:: changed + :tags: querying, performance + + Increased performance of :meth:`~ftrack_api.query.QueryResult.first` and + :meth:`~ftrack_api.query.QueryResult.one` by using new `limit` syntax. + +.. release:: 0.12.0 + :date: 2015-12-17 + + .. change:: new + :tags: session, widget url + + Added :meth:`ftrack_api.session.Session.get_widget_url` to retrieve an + authenticated URL to info or tasks widgets. + +.. release:: 0.11.0 + :date: 2015-12-04 + + .. change:: new + :tags: documentation + + Updated :ref:`release/migrating_from_old_api` with new link attribute + and added a :ref:`usage example `. + + .. change:: new + :tags: caching, schemas, performance + + Caching of schemas for increased performance. + :meth:`ftrack_api.session.Session` now accepts `schema_cache_path` + argument to specify location of schema cache. If not set it will use a + temporary folder. + +.. release:: 0.10.0 + :date: 2015-11-24 + + .. change:: changed + :tags: tests + + Updated session test to use mocked schemas for encoding tests. + + .. 
change:: fixed + + Documentation specifies Python 2.6 instead of Python 2.7 as minimum + interpreter version. + + .. change:: fixed + + Documentation does not reflect current dependencies. + + .. change:: changed + :tags: session, component, locations, performance + + Improved performance of + :meth:`ftrack_api.entity.location.Location.add_components` by batching + database operations. + + As a result it is no longer possible to determine progress of transfer + for container components in realtime as events will be emitted in batch + at end of operation. + + In addition, it is now the callers responsibility to clean up any + transferred data should an error occur during either data transfer or + database registration. + + .. change:: changed + :tags: exception, locations + + :exc:`ftrack_api.exception.ComponentInLocationError` now accepts either + a single component or multiple components and makes them available as + *components* in its *details* parameter. + + .. change:: changed + :tags: tests + + Updated session test to not fail on the new private link attribute. + + .. change:: changed + :tags: session + + Internal method :py:meth:`_fetch_schemas` has beed renamed to + :py:meth:`Session._load_schemas` and now requires a `schema_cache_path` + argument. + +.. release:: 0.9.0 + :date: 2015-10-30 + + .. change:: new + :tags: caching + + Added :meth:`ftrack_api.cache.Cache.values` as helper for retrieving + all values in cache. + + .. change:: fixed + :tags: session, caching + + :meth:`Session.merge` redundantly attempts to expand entity references + that have already been expanded causing performance degradation. + + .. change:: new + :tags: session + + :meth:`Session.rollback` has been added to support cleanly reverting + session state to last good state following a failed commit. + + .. change:: changed + :tags: events + + Event hub will no longer allow unverified SSL connections. + + .. seealso:: :ref:`security_and_authentication`. + + .. 
change:: changed + :tags: session + + :meth:`Session.reset` no longer resets the connection. It also clears + all local state and re-configures certain aspects that are cache + dependant, such as location plugins. + + .. change:: fixed + :tags: factory + + Debug logging messages using incorrect index for formatting leading to + misleading exception. + +.. release:: 0.8.4 + :date: 2015-10-08 + + .. change:: new + + Added initial support for custom attributes. + + .. seealso:: :ref:`example/custom_attribute`. + + .. change:: new + :tags: collection, attribute + + Added :class:`ftrack_api.collection.CustomAttributeCollectionProxy` and + :class:`ftrack_api.attribute.CustomAttributeCollectionAttribute` to + handle custom attributes. + + .. change:: changed + :tags: collection, attribute + + ``ftrack_api.attribute.MappedCollectionAttribute`` renamed to + :class:`ftrack_api.attribute.KeyValueMappedCollectionAttribute` to more + closely reflect purpose. + + .. change:: changed + :tags: collection + + :class:`ftrack_api.collection.MappedCollectionProxy` has been refactored + as a generic base class with key, value specialisation handled in new + dedicated class + :class:`ftrack_api.collection.KeyValueMappedCollectionProxy`. This is + done to avoid confusion following introduction of new + :class:`ftrack_api.collection.CustomAttributeCollectionProxy` class. + + .. change:: fixed + :tags: events + + The event hub does not always reconnect after computer has come back + from sleep. + +.. release:: 0.8.3 + :date: 2015-09-28 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2.1, < 3.4 required. + + .. change:: changed + + Updated *ftrack.server* location implementation. A server version of 3.3 + or higher is required for it to function properly. + + .. change:: fixed + + :meth:`ftrack_api.entity.factory.StandardFactory.create` not respecting + *bases* argument. + +.. release:: 0.8.2 + :date: 2015-09-16 + + .. 
change:: fixed + :tags: session + + Wrong file type set on component when publishing image sequence using + :meth:`Session.create_component`. + +.. release:: 0.8.1 + :date: 2015-09-08 + + .. change:: fixed + :tags: session + + :meth:`Session.ensure` not implemented. + +.. release:: 0.8.0 + :date: 2015-08-28 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2.1, < 3.3 required. + + .. change:: new + + Added lists example. + + .. seealso:: :ref:`example/list`. + + .. change:: new + + Added convenience methods for handling timers + :class:`~ftrack_api.entity.user.User.start_timer` and + :class:`~ftrack_api.entity.user.User.stop_timer`. + + .. change:: changed + + The dynamic API classes Type, Status, Priority and + StatusType have been renamed to Type, Status, Priority and State. + + .. change:: changed + + :meth:`Session.reset` now also clears the top most level cache (by + default a :class:`~ftrack_api.cache.MemoryCache`). + + .. change:: fixed + + Some invalid server url formats not detected. + + .. change:: fixed + + Reply events not encoded correctly causing them to be misinterpreted by + the server. + +.. release:: 0.7.0 + :date: 2015-08-24 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2, < 3.3 required. + + .. change:: changed + + Removed automatic set of default statusid, priorityid and typeid on + objects as that is now either not mandatory or handled on server. + + .. change:: changed + + Updated :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_statuses` + and :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_types` to + handle custom objects. + +.. release:: 0.6.0 + :date: 2015-08-19 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.1.8, < 3.2 required. + + .. change:: changed + :tags: querying, documentation + + Updated documentation with details on new operators ``has`` and ``any`` + for querying relationships. + + .. 
seealso:: :ref:`querying/criteria/operators` + +.. release:: 0.5.2 + :date: 2015-07-29 + + .. change:: changed + :tags: server version + + ftrack server version 3.1.5 or greater required. + + .. change:: changed + + Server reported errors are now more readable and are no longer sometimes + presented as an HTML page. + +.. release:: 0.5.1 + :date: 2015-07-06 + + .. change:: changed + + Defaults computed by :class:`~ftrack_api.entity.factory.StandardFactory` + are now memoised per session to improve performance. + + .. change:: changed + + :class:`~ftrack_api.cache.Memoiser` now supports a *return_copies* + parameter to control whether deep copies should be returned when a value + was retrieved from the cache. + +.. release:: 0.5.0 + :date: 2015-07-02 + + .. change:: changed + + Now checks for server compatibility and requires an ftrack server + version of 3.1 or greater. + + .. change:: new + + Added convenience methods to :class:`~ftrack_api.query.QueryResult` to + fetch :meth:`~ftrack_api.query.QueryResult.first` or exactly + :meth:`~ftrack_api.query.QueryResult.one` result. + + .. change:: new + :tags: notes + + Added support for handling notes. + + .. seealso:: :ref:`example/note`. + + .. change:: changed + + Collection attributes generate empty collection on first access when no + remote value available. This allows interacting with a collection on a + newly created entity before committing. + + .. change:: fixed + :tags: session + + Ambiguous error raised when :class:`Session` is started with an invalid + user or key. + + .. change:: fixed + :tags: caching, session + + :meth:`Session.merge` fails against + :class:`~ftrack_api.cache.SerialisedCache` when circular reference + encountered due to entity identity not being prioritised in merge. + +.. release:: 0.4.3 + :date: 2015-06-29 + + .. change:: fixed + :tags: plugins, session, entity types + + Entity types not constructed following standard install. 
+ + This is because the discovery of the default plugins is unreliable + across Python installation processes (pip, wheel etc). Instead, the + default plugins have been added as templates to the :ref:`event_list` + documentation and the + :class:`~ftrack_api.entity.factory.StandardFactory` used to create any + missing classes on :class:`Session` startup. + +.. release:: 0.4.2 + :date: 2015-06-26 + + .. change:: fixed + :tags: metadata + + Setting exact same metadata twice can cause + :exc:`~ftrack_api.exception.ImmutableAttributeError` to be incorrectly + raised. + + .. change:: fixed + :tags: session + + Calling :meth:`Session.commit` does not clear locally set attribute + values leading to immutability checks being bypassed in certain cases. + +.. release:: 0.4.1 + :date: 2015-06-25 + + .. change:: fixed + :tags: metadata + + Setting metadata twice in one session causes `KeyError`. + +.. release:: 0.4.0 + :date: 2015-06-22 + + .. change:: changed + :tags: documentation + + Documentation extensively updated. + + .. change:: new + :tags: Client review + + Added support for handling review sessions. + + .. seealso:: :ref:`Usage guide `. + + .. change:: fixed + + Metadata property not working in line with rest of system, particularly + the caching framework. + + .. change:: new + :tags: collection + + Added :class:`ftrack_api.collection.MappedCollectionProxy` class for + providing a dictionary interface to a standard + :class:`ftrack_api.collection.Collection`. + + .. change:: new + :tags: collection, attribute + + Added :class:`ftrack_api.attribute.MappedCollectionAttribute` class for + describing an attribute that should use the + :class:`ftrack_api.collection.MappedCollectionProxy`. + + .. change:: new + + Entities that use composite primary keys are now fully supported in the + session, including for :meth:`Session.get` and :meth:`Session.populate`. + + .. 
change:: change + + Base :class:`ftrack_api.entity.factory.Factory` refactored to separate + out attribute instantiation into dedicated methods to make extending + simpler. + + .. change:: change + :tags: collection, attribute + + :class:`ftrack_api.attribute.DictionaryAttribute` and + :class:`ftrack_api.attribute.DictionaryAttributeCollection` removed. + They have been replaced by the new + :class:`ftrack_api.attribute.MappedCollectionAttribute` and + :class:`ftrack_api.collection.MappedCollectionProxy` respectively. + + .. change:: new + :tags: events + + :class:`Session` now supports an *auto_connect_event_hub* argument to + control whether the built in event hub should connect to the server on + session initialisation. This is useful for when only local events should + be supported or when the connection should be manually controlled. + +.. release:: 0.3.0 + :date: 2015-06-14 + + .. change:: fixed + + Session operations may be applied server side in invalid order resulting + in unexpected error. + + .. change:: fixed + + Creating and deleting an entity in single commit causes error as create + operation never persisted to server. + + Now all operations for the entity are ignored on commit when this case + is detected. + + .. change:: changed + + Internally moved from differential state to operation tracking for + determining session changes when persisting. + + .. change:: new + + ``Session.recorded_operations`` attribute for examining current + pending operations on a :class:`Session`. + + .. change:: new + + :meth:`Session.operation_recording` context manager for suspending + recording operations temporarily. Can also manually control + ``Session.record_operations`` boolean. + + .. change:: new + + Operation classes to track individual operations occurring in session. + + .. change:: new + + Public :meth:`Session.merge` method for merging arbitrary values into + the session manually. + + .. 
change:: changed + + An entity's state is now computed from the operations performed on it + and is no longer manually settable. + + .. change:: changed + + ``Entity.state`` attribute removed. Instead use the new inspection + :func:`ftrack_api.inspection.state`. + + Previously:: + + print entity.state + + Now:: + + import ftrack_api.inspection + print ftrack_api.inspection.state(entity) + + There is also an optimised inspection, + :func:`ftrack_api.inspection.states`. for determining state of many + entities at once. + + .. change:: changed + + Shallow copying a :class:`ftrack_api.symbol.Symbol` instance now + returns same instance. + +.. release:: 0.2.0 + :date: 2015-06-04 + + .. change:: changed + + Changed name of API from `ftrack` to `ftrack_api`. + + .. seealso:: :ref:`release/migration/0.2.0/new_api_name`. + + .. change:: new + :tags: caching + + Configurable caching support in :class:`Session`, including the ability + to use an external persisted cache and new cache implementations. + + .. seealso:: :ref:`caching`. + + .. change:: new + :tags: caching + + :meth:`Session.get` now tries to retrieve matching entity from + configured cache first. + + .. change:: new + :tags: serialisation, caching + + :meth:`Session.encode` supports a new mode *persisted_only* that will + only encode persisted attribute values. + + .. change:: changed + + Session.merge method is now private (:meth:`Session._merge`) until it is + qualified for general usage. + + .. change:: changed + :tags: entity state + + :class:`~ftrack_api.entity.base.Entity` state now managed on the entity + directly rather than stored separately in the :class:`Session`. + + Previously:: + + session.set_state(entity, state) + print session.get_state(entity) + + Now:: + + entity.state = state + print entity.state + + .. change:: changed + :tags: entity state + + Entity states are now :class:`ftrack_api.symbol.Symbol` instances rather + than strings. 
+ + Previously:: + + entity.state = 'created' + + Now:: + + entity.state = ftrack_api.symbol.CREATED + + .. change:: fixed + :tags: entity state + + It is now valid to transition from most entity states to an + :attr:`ftrack_api.symbol.NOT_SET` state. + + .. change:: changed + :tags: caching + + :class:`~ftrack_api.cache.EntityKeyMaker` removed and replaced by + :class:`~ftrack_api.cache.StringKeyMaker`. Entity identity now + computed separately and passed to key maker to allow key maker to work + with non entity instances. + + .. change:: fixed + :tags: entity + + Internal data keys ignored when re/constructing entities reducing + distracting and irrelevant warnings in logs. + + .. change:: fixed + :tags: entity + + :class:`~ftrack_api.entity.base.Entity` equality test raises error when + other is not an entity instance. + + .. change:: changed + :tags: entity, caching + + :meth:`~ftrack_api.entity.base.Entity.merge` now also merges state and + local attributes. In addition, it ensures values being merged have also + been merged into the session and outputs more log messages. + + .. change:: fixed + :tags: inspection + + :func:`ftrack_api.inspection.identity` returns different result for same + entity depending on whether entity type is unicode or string. + + .. change:: fixed + + :func:`ftrack_api.mixin` causes method resolution failure when same + class mixed in multiple times. + + .. change:: changed + + Representations of objects now show plain id rather than converting to + hex. + + .. change:: fixed + :tags: events + + Event hub raises TypeError when listening to ftrack.update events. + + .. change:: fixed + :tags: events + + :meth:`ftrack_api.event.hub.EventHub.subscribe` fails when subscription + argument contains special characters such as `@` or `+`. + + .. change:: fixed + :tags: collection + + :meth:`ftrack_api.collection.Collection` incorrectly modifies entity + state on initialisation. + +.. release:: 0.1.0 + :date: 2015-03-25 + + .. 
change:: changed + + Moved standardised construct entity type logic to core package (as part + of the :class:`~ftrack_api.entity.factory.StandardFactory`) for easier + reuse and extension. + +.. release:: 0.1.0-beta.2 + :date: 2015-03-17 + + .. change:: new + :tags: locations + + Support for ftrack.server location. The corresponding server build is + required for it to function properly. + + .. change:: new + :tags: locations + + Support for managing components in locations has been added. Check out + the :ref:`dedicated tutorial `. + + .. change:: new + + A new inspection API (:mod:`ftrack_api.inspection`) has been added for + extracting useful information from objects in the system, such as the + identity of an entity. + + .. change:: changed + + ``Entity.primary_key`` and ``Entity.identity`` have been removed. + Instead, use the new :func:`ftrack_api.inspection.primary_key` and + :func:`ftrack_api.inspection.identity` functions. This was done to make it + clearer the the extracted information is determined from the current + entity state and modifying the returned object will have no effect on + the entity instance itself. + + .. change:: changed + + :func:`ftrack_api.inspection.primary_key` now returns a mapping of the + attribute names and values that make up the primary key, rather than + the previous behaviour of returning a tuple of just the values. To + emulate previous behaviour do:: + + ftrack_api.inspection.primary_key(entity).values() + + .. change:: changed + + :meth:`Session.encode` now supports different strategies for encoding + entities via the entity_attribute_strategy* keyword argument. This makes + it possible to use this method for general serialisation of entity + instances. + + .. change:: changed + + Encoded referenced entities are now a mapping containing + *__entity_type__* and then each key, value pair that makes up the + entity's primary key. 
For example:: + + { + '__entity_type__': 'User', + 'id': '8b90a444-4e65-11e1-a500-f23c91df25eb' + } + + .. change:: changed + + :meth:`Session.decode` no longer automatically adds decoded entities to + the :class:`Session` cache making it possible to use decode + independently. + + .. change:: new + + Added :meth:`Session.merge` for merging entities recursively into the + session cache. + + .. change:: fixed + + Replacing an entity in a :class:`ftrack_api.collection.Collection` with an + identical entity no longer raises + :exc:`ftrack_api.exception.DuplicateItemInCollectionError`. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py new file mode 100644 index 00000000000..5fda0195a95 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py @@ -0,0 +1,24 @@ +# :coding: utf-8 +import logging + +import ftrack_api.session + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + logger = logging.getLogger('com.example.example-plugin') + + # Validate that session is an instance of ftrack_api.Session. If not, + # assume that register is being called from an old or incompatible API and + # return without doing anything. + if not isinstance(session, ftrack_api.session.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + # Perform your logic here, such as subscribe to an event. 
+ pass + + logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py new file mode 100644 index 00000000000..dd11136d69b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py @@ -0,0 +1,37 @@ +# :coding: utf-8 +import logging + +import ftrack_api.session + + +def register_with_session_ready(event): + '''Called when session is ready to be used.''' + logger = logging.getLogger('com.example.example-plugin') + logger.debug('Session ready.') + session = event['data']['session'] + + # Session is now ready and can be used to e.g. query objects. + task = session.query('Task').first() + print task['name'] + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + logger = logging.getLogger('com.example.example-plugin') + + # Validate that session is an instance of ftrack_api.Session. If not, + # assume that register is being called from an old or incompatible API and + # return without doing anything. 
+ if not isinstance(session, ftrack_api.session.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + session.event_hub.subscribe( + 'topic=ftrack.api.session.ready', + register_with_session_ready + ) + + logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst new file mode 100644 index 00000000000..724afa81a64 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst @@ -0,0 +1,38 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _security_and_authentication: + +*************************** +Security and authentication +*************************** + +Self signed SSL certificate +=========================== + +When using a self signed SSL certificate the API may fail to connect if it +cannot verify the SSL certificate. Under the hood the +`requests `_ library is used and it +must be specified where the trusted certificate authority can be found using the +environment variable ``REQUESTS_CA_BUNDLE``. + +.. seealso:: `SSL Cert Verification `_ + +InsecurePlatformWarning +======================= + +When using this API you may sometimes see a warning:: + + InsecurePlatformWarning: A true SSLContext object is not available. This + prevents urllib3 from configuring SSL appropriately and may cause certain + SSL connections to fail. + +If you encounter this warning, its recommended you upgrade to Python 2.7.9, or +use pyOpenSSL. To use pyOpenSSL simply:: + + pip install pyopenssl ndg-httpsclient pyasn1 + +and the `requests `_ library used by +this API will use pyOpenSSL instead. + +.. 
seealso:: `InsecurePlatformWarning `_ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst new file mode 100644 index 00000000000..73b352eb2f6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst @@ -0,0 +1,156 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _tutorial: + +******** +Tutorial +******** + +.. currentmodule:: ftrack_api.session + +This tutorial provides a quick dive into using the API and the broad stroke +concepts involved. + +First make sure the ftrack Python API is :ref:`installed `. + +Then start a Python session and import the ftrack API:: + + >>> import ftrack_api + +The API uses :ref:`sessions ` to manage communication +with an ftrack server. Create a session that connects to your ftrack server +(changing the passed values as appropriate):: + + >>> session = ftrack_api.Session( + ... server_url='https://mycompany.ftrackapp.com', + ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', + ... api_user='martin' + ... ) + +.. note:: + + A session can use :ref:`environment variables + ` to configure itself. + +Now print a list of the available entity types retrieved from the server:: + + >>> print session.types.keys() + [u'TypedContext', u'ObjectType', u'Priority', u'Project', u'Sequence', + u'Shot', u'Task', u'Status', u'Type', u'Timelog', u'User'] + +Now the list of possible entity types is known, :ref:`query ` the +server to retrieve entities of a particular type by using the +:meth:`Session.query` method:: + + >>> projects = session.query('Project') + +Each project retrieved will be an :ref:`entity ` instance +that behaves much like a standard Python dictionary. 
For example, to find out +the available keys for an entity, call the +:meth:`~ftrack_api.entity.Entity.keys` method:: + + >>> print projects[0].keys() + [u'status', u'is_global', u'name', u'end_date', u'context_type', + u'id', u'full_name', u'root', u'start_date'] + +Now, iterate over the retrieved entities and print each ones name:: + + >>> for project in projects: + ... print project['name'] + test + client_review + tdb + man_test + ftrack + bunny + +.. note:: + + Many attributes for retrieved entities are loaded on demand when the + attribute is first accessed. Doing this lots of times in a script can be + inefficient, so it is worth using :ref:`projections ` + in queries or :ref:`pre-populating ` + entities where appropriate. You can also :ref:`customise default projections + ` to help others + pre-load common attributes. + +To narrow a search, add :ref:`criteria ` to the query:: + + >>> active_projects = session.query('Project where status is active') + +Combine criteria for more powerful queries:: + + >>> import arrow + >>> + >>> active_projects_ending_before_next_week = session.query( + ... 'Project where status is active and end_date before "{0}"' + ... .format(arrow.now().replace(weeks=+1)) + ... ) + +Some attributes on an entity will refer to another entity or collection of +entities, such as *children* on a *Project* being a collection of *Context* +entities that have the project as their parent:: + + >>> project = session.query('Project').first() + >>> print project['children'] + + +And on each *Context* there is a corresponding *parent* attribute which is a +link back to the parent:: + + >>> child = project['children'][0] + >>> print child['parent'] is project + True + +These relationships can also be used in the criteria for a query:: + + >>> results = session.query( + ... 'Context where parent.name like "te%"' + ... ) + +To create new entities in the system use :meth:`Session.create`:: + + >>> new_sequence = session.create('Sequence', { + ... 
'name': 'Starlord Reveal' + ... }) + +The created entity is not yet persisted to the server, but it is still possible +to modify it. + + >>> new_sequence['description'] = 'First hero character reveal.' + +The sequence also needs a parent. This can be done in one of two ways: + +* Set the parent attribute on the sequence:: + + >>> new_sequence['parent'] = project + +* Add the sequence to a parent's children attribute:: + + >>> project['children'].append(new_sequence) + +When ready, persist to the server using :meth:`Session.commit`:: + + >>> session.commit() + +When finished with a :class:`Session`, it is important to :meth:`~Session.close` +it in order to release resources and properly unsubscribe any registered event +listeners. It is also possible to use the session as a context manager in order +to have it closed automatically after use:: + + >>> with ftrack_api.Session() as session: + ... print session.query('User').first() + + >>> print session.closed + True + +Once a :class:`Session` is closed, any operations that attempt to use the closed +connection to the ftrack server will fail:: + + >>> session.query('Project').first() + ConnectionClosedError: Connection closed. + +Continue to the next section to start learning more about the API in greater +depth or jump over to the :ref:`usage examples ` if you prefer to learn +by example. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst new file mode 100644 index 00000000000..e3602c4fa9d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst @@ -0,0 +1,281 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _understanding_sessions: + +********************** +Understanding sessions +********************** + +.. 
currentmodule:: ftrack_api.session + +All communication with an ftrack server takes place through a :class:`Session`. +This allows more opportunity for configuring the connection, plugins etc. and +also makes it possible to connect to multiple ftrack servers from within the +same Python process. + +.. _understanding_sessions/connection: + +Connection +========== + +A session can be manually configured at runtime to connect to a server with +certain credentials:: + + >>> session = ftrack_api.Session( + ... server_url='https://mycompany.ftrackapp.com', + ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', + ... api_user='martin' + ... ) + +Alternatively, a session can use the following environment variables to +configure itself: + + * :envvar:`FTRACK_SERVER` + * :envvar:`FTRACK_API_USER` + * :envvar:`FTRACK_API_KEY` + +When using environment variables, no server connection arguments need to be +passed manually:: + + >>> session = ftrack_api.Session() + +.. _understanding_sessions/unit_of_work: + +Unit of work +============ + +Each session follows the unit of work pattern. This means that many of the +operations performed using a session will happen locally and only be persisted +to the server at certain times, notably when calling :meth:`Session.commit`. +This approach helps optimise calls to the server and also group related logic +together in a transaction:: + + user = session.create('User', {}) + user['username'] = 'martin' + other_user = session.create('User', {'username': 'bjorn'}) + other_user['email'] = 'bjorn@example.com' + +Behind the scenes a series of :class:`operations +` are recorded reflecting the changes made. You +can take a peek at these operations if desired by examining the +``Session.recorded_operations`` property:: + + >>> for operation in session.recorded_operations: + ... print operation + + + + + +Calling :meth:`Session.commit` persists all recorded operations to the server +and clears the operation log:: + + session.commit() + +.. 
note:: + + The commit call will optimise operations to be as efficient as possible + without breaking logical ordering. For example, a create followed by updates + on the same entity will be compressed into a single create. + +Queries are special and always issued on demand. As a result, a query may return +unexpected results if the relevant local changes have not yet been sent to the +server:: + + >>> user = session.create('User', {'username': 'some_unique_username'}) + >>> query = 'User where username is "{0}"'.format(user['username']) + >>> print len(session.query(query)) + 0 + >>> session.commit() + >>> print len(session.query(query)) + 1 + +Where possible, query results are merged in with existing data transparently +with any local changes preserved:: + + >>> user = session.query('User').first() + >>> user['email'] = 'me@example.com' # Not yet committed to server. + >>> retrieved = session.query( + ... 'User where id is "{0}"'.format(user['id']) + ... ).one() + >>> print retrieved['email'] # Displays locally set value. + 'me@example.com' + >>> print retrieved is user + True + +This is possible due to the smart :ref:`caching` layer in the session. + +.. _understanding_sessions/auto_population: + +Auto-population +=============== + +Another important concept in a session is that of auto-population. By default a +session is configured to auto-populate missing attribute values on access. This +means that the first time you access an attribute on an entity instance a query +will be sent to the server to fetch the value:: + + user = session.query('User').first() + # The next command will issue a request to the server to fetch the + # 'username' value on demand at this is the first time it is accessed. + print user['username'] + +Once a value has been retrieved it is :ref:`cached ` locally in the +session and accessing it again will not issue more server calls:: + + # On second access no server call is made. 
+ print user['username'] + +You can control the auto population behaviour of a session by either changing +the ``Session.auto_populate`` attribute on a session or using the provided +context helper :meth:`Session.auto_populating` to temporarily change the +setting. When turned off you may see a special +:attr:`~ftrack_api.symbol.NOT_SET` symbol that represents a value has not yet +been fetched:: + + >>> with session.auto_populating(False): + ... print user['email'] + NOT_SET + +Whilst convenient for simple scripts, making many requests to the server for +each attribute can slow execution of a script. To support optimisation the API +includes methods for batch fetching attributes. Read about them in +:ref:`querying/projections` and :ref:`working_with_entities/populating`. + +.. _understanding_sessions/entity_types: + +Entity types +============ + +When a session has successfully connected to the server it will automatically +download schema information and :ref:`create appropriate classes +` for use. This is important as different +servers can support different entity types and configurations. + +This information is readily available and useful if you need to check that the +entity types you expect are present. Here's how to print a list of all entity +types registered for use in the current API session:: + + >>> print session.types.keys() + [u'Task', u'Shot', u'TypedContext', u'Sequence', u'Priority', + u'Status', u'Project', u'User', u'Type', u'ObjectType'] + +Each entity type is backed by a :ref:`customisable class +` that further describes the entity type and +the attributes that are available. + +.. hint:: + + If you need to use an :func:`isinstance` check, always go through the + session as the classes are built dynamically:: + + >>> isinstance(entity, session.types['Project']) + +.. 
_understanding_sessions/plugins: + +Configuring plugins +=================== + +Plugins are used by the API to extend it with new functionality, such as +:term:`locations ` or adding convenience methods to +:ref:`understanding_sessions/entity_types`. In addition to new API +functionality, event plugins may also be used for event processing by listening +to :ref:`ftrack update events ` or adding custom functionality to ftrack by registering +:term:`actions `. + + +When starting a new :class:`Session` either pass the *plugins_paths* to search +explicitly or rely on the environment variable +:envvar:`FTRACK_EVENT_PLUGIN_PATH`. As each session is independent of others, +you can configure plugins per session. + +The paths will be searched for :term:`plugins `, python files +which expose a `register` function. These functions will be evaluated and can +be used to extend the API with new functionality, such as locations or actions. + +If you do not specify any override then the session will attempt to discover and +use the default plugins. + +Plugins are discovered using :func:`ftrack_api.plugin.discover` with the +session instance passed as the sole positional argument. Most plugins should +take the form of a mount function that then subscribes to specific :ref:`events +` on the session:: + + def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + # Find location(s) and customise instances. + + def register(session): + '''Register plugin with *session*.''' + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) + +Additional keyword arguments can be passed as *plugin_arguments* to the +:class:`Session` on instantiation. These are passed to the plugin register +function if its signature supports them:: + + # a_plugin.py + def register(session, reticulate_splines=False): + '''Register plugin with *session*.''' + ...
+ + # main.py + session = ftrack_api.Session( + plugin_arguments={ + 'reticulate_splines': True, + 'some_other_argument': 42 + } + ) + +.. seealso:: + + Lists of events which you can subscribe to in your plugins are available + both for :ref:`synchronous event published by the python API ` + and :ref:`asynchronous events published by the server ` + + +Quick setup +----------- + +1. Create a directory where plugins will be stored. Place any plugins you want +loaded automatically in an API *session* here. + +.. image:: /image/configuring_plugins_directory.png + +2. Configure the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. + + +Detailed setup +-------------- + +Start out by creating a directory on your machine where you will store your +plugins. Download :download:`example_plugin.py ` +and place it in the directory. + +Open up a terminal window, and ensure that plugin is picked up when +instantiating the session and manually setting the *plugin_paths*:: + + >>> # Set up basic logging + >>> import logging + >>> logging.basicConfig() + >>> plugin_logger = logging.getLogger('com.example.example-plugin') + >>> plugin_logger.setLevel(logging.DEBUG) + >>> + >>> # Configure the API, loading plugins in the specified paths. + >>> import ftrack_api + >>> plugin_paths = ['/path/to/plugins'] + >>> session = ftrack_api.Session(plugin_paths=plugin_paths) + +If everything is working as expected, you should see the following in the +output:: + + DEBUG:com.example.example-plugin:Plugin registered + +Instead of specifying the plugin paths when instantiating the session, you can +also specify the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. +To specify multiple directories, use the path separator for your operating +system. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst new file mode 100644 index 00000000000..2d9d26f986f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst @@ -0,0 +1,434 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _working_with_entities: + +********************* +Working with entities +********************* + +.. currentmodule:: ftrack_api.session + +:class:`Entity ` instances are Python dict-like +objects whose keys correspond to attributes for that type in the system. They +may also provide helper methods to perform common operations such as replying to +a note:: + + note = session.query('Note').first() + print note.keys() + print note['content'] + note['content'] = 'A different message!' + reply = note.create_reply(...) + +.. _working_with_entities/attributes: + +Attributes +========== + +Each entity instance is typed according to its underlying entity type on the +server and configured with appropriate attributes. For example, a *task* will be +represented by a *Task* class and have corresponding attributes. You can +:ref:`customise entity classes ` to alter +attribute access or provide your own helper methods. + +To see the available attribute names on an entity use the +:meth:`~ftrack_api.entity.base.Entity.keys` method on the instance:: + + >>> task = session.query('Task').first() + >>> print task.keys() + ['id', 'name', ...] + +If you need more information about the type of attribute, examine the +``attributes`` property on the corresponding class:: + + >>> for attribute in type(task).attributes: + ... print attribute + + + + + + ... 
+ +Notice that there are different types of attribute such as +:class:`~ftrack_api.attribute.ScalarAttribute` for plain values or +:class:`~ftrack_api.attribute.ReferenceAttribute` for relationships. These +different types are reflected in the behaviour on the entity instance when +accessing a particular attribute by key: + + >>> # Scalar + >>> print task['name'] + 'model' + >>> task['name'] = 'comp' + + >>> # Single reference + >>> print task['status'] + + >>> new_status = session.query('Status').first() + >>> task['status'] = new_status + + >>> # Collection + >>> print task['timelogs'] + + >>> print task['timelogs'][:] + [, ...] + >>> new_timelog = session.create('Timelog', {...}) + >>> task['timelogs'].append(new_timelog) + +.. _working_with_entities/attributes/bidirectional: + +Bi-directional relationships +---------------------------- + +Some attributes refer to different sides of a bi-directional relationship. In +the current version of the API bi-directional updates are not propagated +automatically to the other side of the relationship. For example, setting a +*parent* will not update the parent entity's *children* collection locally. +There are plans to support this behaviour better in the future. For now, after +commit, :ref:`populate ` the reverse side +attribute manually. + +.. _working_with_entities/creating: + +Creating entities +================= + +In order to create a new instance of an entity call :meth:`Session.create` +passing in the entity type to create and any initial attribute values:: + + new_user = session.create('User', {'username': 'martin'}) + +If there are any default values that can be set client side then they will be +applied at this point. Typically this will be the unique entity key:: + + >>> print new_user['id'] + 170f02a4-6656-4f15-a5cb-c4dd77ce0540 + +At this point no information has been sent to the server. 
However, you are free +to continue :ref:`updating ` this object +locally until you are ready to persist the changes by calling +:meth:`Session.commit`. + +If you are wondering about what would happen if you accessed an unset attribute +on a newly created entity, go ahead and give it a go:: + + >>> print new_user['first_name'] + NOT_SET + +The session knows that it is a newly created entity that has not yet been +persisted so it doesn't try to fetch any attributes on access even when +``session.auto_populate`` is turned on. + +.. _working_with_entities/updating: + +Updating entities +================= + +Updating an entity is as simple as modifying the values for specific keys on +the dict-like instance and calling :meth:`Session.commit` when ready. The entity +to update can either be a new entity or a retrieved entity:: + + task = session.query('Task').first() + task['bid'] = 8 + +Remember that, for existing entities, accessing an attribute will load it from +the server automatically. If you are interested in just setting values without +first fetching them from the server, turn :ref:`auto-population +` off temporarily:: + + >>> with session.auto_populating(False): + ... task = session.query('Task').first() + ... task['bid'] = 8 + + +.. _working_with_entities/resetting: + +Server side reset of entity attributes or settings. +=================================================== + +Some entities support resetting of attributes, for example +to reset a user's api key:: + + + session.reset_remote( + 'api_key', entity=session.query('User where username is "test_user"').one() + ) + +.. note:: + Currently the only attribute possible to reset is 'api_key' on + the user entity type. + + +.. _working_with_entities/deleting: + +Deleting entities +================= + +To delete an entity you need an instance of the entity in your session (either +from having created one or retrieving one).
Then call :meth:`Session.delete` on +the entity and :meth:`Session.commit` when ready:: + + task_to_delete = session.query('Task').first() + session.delete(task_to_delete) + ... + session.commit() + +.. note:: + + Even though the entity is deleted, you will still have access to the local + instance and any local data stored on that instance whilst that instance + remains in memory. + +Keep in mind that some deletions, when propagated to the server, will cause +other entities to be deleted also, so you don't have to worry about deleting an +entire hierarchy manually. For example, deleting a *Task* will also delete all +*Notes* on that task. + +.. _working_with_entities/populating: + +Populating entities +=================== + +When an entity is retrieved via :meth:`Session.query` or :meth:`Session.get` it +will have some attributes prepopulated. The rest are dynamically loaded when +they are accessed. If you need to access many attributes it can be more +efficient to request all those attributes be loaded in one go. One way to do +this is to use :ref:`projections ` in queries. + +However, if you have entities that have been passed to you from elsewhere you +don't have control over the query that was issued to get those entities. In this +case you can populate those entities in one go using +:meth:`Session.populate` which works exactly like :ref:`projections +` in queries do, but operating against known entities:: + + >>> users = session.query('User') + >>> session.populate(users, 'first_name, last_name') + >>> with session.auto_populating(False): # Turn off for example purpose. + ... for user in users: + ... print 'Name: {0}'.format(user['first_name']) + ... print 'Email: {0}'.format(user['email']) + Name: Martin + Email: NOT_SET + ... + +.. note:: + + You can populate a single or many entities in one call so long as they are + all the same entity type. + +..
_working_with_entities/entity_states: + +Entity states +============= + +Operations on entities are :ref:`recorded in the session +` as they happen. At any time you can +inspect an entity to determine its current state from those pending operations. + +To do this, use :func:`ftrack_api.inspection.state`:: + + >>> import ftrack_api.inspection + >>> new_user = session.create('User', {}) + >>> print ftrack_api.inspection.state(new_user) + CREATED + >>> existing_user = session.query('User').first() + >>> print ftrack_api.inspection.state(existing_user) + NOT_SET + >>> existing_user['email'] = 'martin@example.com' + >>> print ftrack_api.inspection.state(existing_user) + MODIFIED + >>> session.delete(new_user) + >>> print ftrack_api.inspection.state(new_user) + DELETED + +.. _working_with_entities/entity_types: + +Customising entity types +======================== + +Each type of entity in the system is represented in the Python client by a +dedicated class. However, because the types of entities can vary these classes +are built on demand using schema information retrieved from the server. + +Many of the default classes provide additional helper methods which are mixed +into the generated class at runtime when a session is started. + +In some cases it can be useful to tailor the custom classes to your own pipeline +workflows. Perhaps you want to add more helper functions, change attribute +access rules or even providing a layer of backwards compatibility for existing +code. The Python client was built with this in mind and makes such +customisations as easy as possible. + +When a :class:`Session` is constructed it fetches schema details from the +connected server and then calls an :class:`Entity factory +` to create classes from those schemas. It +does this by emitting a synchronous event, +*ftrack.api.session.construct-entity-type*, for each schema and expecting a +*class* object to be returned. 
+ +In the default setup, a :download:`construct_entity_type.py +<../resource/plugin/construct_entity_type.py>` plugin is placed on the +:envvar:`FTRACK_EVENT_PLUGIN_PATH`. This plugin will register a trivial subclass +of :class:`ftrack_api.entity.factory.StandardFactory` to create the classes in +response to the construct event. The simplest way to get started is to edit this +default plugin as required. + +.. seealso:: :ref:`understanding_sessions/plugins` + +.. _working_with_entities/entity_types/default_projections: + +Default projections +------------------- + +When a :ref:`query ` is issued without any :ref:`projections +`, the session will automatically add default projections +according to the type of the entity. + +For example, the following shows that for a *User*, only *id* is fetched by +default when no projections added to the query:: + + >>> user = session.query('User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', Symbol(NOT_SET)), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +.. note:: + + These default projections are also used when you access a relationship + attribute using the dictionary key syntax. + +If you want to default to fetching *username* for a *Task* as well then you can +change the default_projections* in your class factory plugin:: + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + if schema['id'] == 'User': + cls.default_projections = ['id', 'username'] + + return cls + +Now a projection-less query will also query *username* by default: + +.. 
note:: + + You will need to start a new session to pick up the change you made:: + + session = ftrack_api.Session() + +.. code-block:: python + + >>> user = session.query('User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', u'martin'), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +Note that if any specific projections are applied in a query, those override +the default projections entirely. This allows you to also *reduce* the data +loaded on demand:: + + >>> session = ftrack_api.Session() # Start new session to avoid cache. + >>> user = session.query('select id from User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', Symbol(NOT_SET)), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +.. _working_with_entities/entity_types/helper_methods: + +Helper methods +-------------- + +If you want to add additional helper methods to the constructed classes to +better support your pipeline logic, then you can simply patch the created +classes in your factory, much like with changing the default projections:: + + def get_full_name(self): + '''Return full name for user.''' + return '{0} {1}'.format(self['first_name'], self['last_name']).strip() + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + if schema['id'] == 'User': + cls.get_full_name = get_full_name + + return cls + +Now you have a new helper method *get_full_name* on your *User* entities:: + + >>> session = ftrack_api.Session() # New session to pick up changes. 
+ >>> user = session.query('User').first() + >>> print user.get_full_name() + Martin Pengelly-Phillips + +If you'd rather not patch the existing classes, or perhaps have a lot of helpers +to mixin, you can instead inject your own class as the base class. The only +requirement is that it has the base :class:`~ftrack_api.entity.base.Entity` +class in its ancestor classes:: + + import ftrack_api.entity.base + + + class CustomUser(ftrack_api.entity.base.Entity): + '''Represent user.''' + + def get_full_name(self): + '''Return full name for user.''' + return '{0} {1}'.format(self['first_name'], self['last_name']).strip() + + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + # Alter base class for constructed class. + if bases is None: + bases = [ftrack_api.entity.base.Entity] + + if schema['id'] == 'User': + bases = [CustomUser] + + cls = super(Factory, self).create(schema, bases=bases) + return cls + +The resulting effect is the same:: + + >>> session = ftrack_api.Session() # New session to pick up changes. + >>> user = session.query('User').first() + >>> print user.get_full_name() + Martin Pengelly-Phillips + +.. note:: + + Your custom class is not the leaf class which will still be a dynamically + generated class. Instead your custom class becomes the base for the leaf + class:: + + >>> print type(user).__mro__ + (, , ...) 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini new file mode 100644 index 00000000000..b1f515ee18e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +minversion = 2.4.2 +addopts = -v -k-slow --junitxml=test-reports/junit.xml --cache-clear +norecursedirs = .* _* +python_files = test_*.py +python_functions = test_* +mock_use_standalone_module = true \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py new file mode 100644 index 00000000000..0682a5eeb0e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py @@ -0,0 +1,39 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import logging + +import ftrack_api +import ftrack_api.entity.location +import ftrack_api.accessor.disk + + +def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + + # Find location(s) and customise instances. + # + # location = session.query('Location where name is "my.location"').one() + # ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + # location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + + +def register(session): + '''Register plugin with *session*.''' + logger = logging.getLogger('ftrack_plugin:configure_locations.register') + + # Validate that session is an instance of ftrack_api.Session. If not, assume + # that register is being called from an old or incompatible API and return + # without doing anything. 
+ if not isinstance(session, ftrack_api.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py new file mode 100644 index 00000000000..45f78416708 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py @@ -0,0 +1,46 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import logging + +import ftrack_api.entity.factory + + +class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + # Optionally change bases for class to be generated. + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + + return cls + + +def register(session): + '''Register plugin with *session*.''' + logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') + + # Validate that session is an instance of ftrack_api.Session. If not, assume + # that register is being called from an old or incompatible API and return + # without doing anything. 
+ if not isinstance(session, ftrack_api.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + factory = Factory() + + def construct_entity_type(event): + '''Return class to represent entity type specified by *event*.''' + schema = event['data']['schema'] + return factory.create(schema) + + session.event_hub.subscribe( + 'topic=ftrack.api.session.construct-entity-type', + construct_entity_type + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg new file mode 100644 index 00000000000..b2ad8fd0861 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg @@ -0,0 +1,6 @@ +[build_sphinx] +config-dir = doc +source-dir = doc +build-dir = build/doc +builder = html +all_files = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py new file mode 100644 index 00000000000..da99a572b4c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py @@ -0,0 +1,81 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import os +import re + +from setuptools import setup, find_packages +from setuptools.command.test import test as TestCommand + + +ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) +RESOURCE_PATH = os.path.join(ROOT_PATH, 'resource') +SOURCE_PATH = os.path.join(ROOT_PATH, 'source') +README_PATH = os.path.join(ROOT_PATH, 'README.rst') + + +# Read version from source. +with open( + os.path.join(SOURCE_PATH, 'ftrack_api', '_version.py') +) as _version_file: + VERSION = re.match( + r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL + ).group(1) + + +# Custom commands. 
+class PyTest(TestCommand): + '''Pytest command.''' + + def finalize_options(self): + '''Finalize options to be used.''' + TestCommand.finalize_options(self) + self.test_args = [] + self.test_suite = True + + def run_tests(self): + '''Import pytest and run.''' + import pytest + raise SystemExit(pytest.main(self.test_args)) + + +# Call main setup. +setup( + name='ftrack-python-api', + version=VERSION, + description='Python API for ftrack.', + long_description=open(README_PATH).read(), + keywords='ftrack, python, api', + url='https://bitbucket.org/ftrack/ftrack-python-api', + author='ftrack', + author_email='support@ftrack.com', + license='Apache License (2.0)', + packages=find_packages(SOURCE_PATH), + package_dir={ + '': 'source' + }, + setup_requires=[ + 'sphinx >= 1.2.2, < 2', + 'sphinx_rtd_theme >= 0.1.6, < 1', + 'lowdown >= 0.1.0, < 2' + ], + install_requires=[ + 'requests >= 2, <3', + 'arrow >= 0.4.4, < 1', + 'termcolor >= 1.1.0, < 2', + 'pyparsing >= 2.0, < 3', + 'clique >= 1.2.0, < 2', + 'websocket-client >= 0.40.0, < 1' + ], + tests_require=[ + 'pytest >= 2.7, < 3', + 'pytest-mock >= 0.4, < 1', + 'pytest-catchlog >= 1, <=2' + ], + cmdclass={ + 'test': PyTest + }, + zip_safe=False, + python_requires=">=2.7.9, <3.0" + +) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py new file mode 100644 index 00000000000..34833aa0dd6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py @@ -0,0 +1 @@ +from ftrack_api import * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py new file mode 100644 index 00000000000..d8ee30bd8f7 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py @@ -0,0 +1,32 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from ._version import __version__ +from .session import Session + + +def mixin(instance, mixin_class, name=None): + '''Mixin *mixin_class* to *instance*. + + *name* can be used to specify new class name. If not specified then one will + be generated. + + ''' + if name is None: + name = '{0}{1}'.format( + instance.__class__.__name__, mixin_class.__name__ + ) + + # Check mixin class not already present in mro in order to avoid consistent + # method resolution failure. + if mixin_class in instance.__class__.mro(): + return + + instance.__class__ = type( + name, + ( + mixin_class, + instance.__class__ + ), + {} + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py new file mode 100644 index 00000000000..fbe14f32772 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py @@ -0,0 +1,656 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +from __future__ import absolute_import + +import logging +import json +import sys +import os + +import ftrack_api +import ftrack_api.structure.standard as _standard +from ftrack_api.logging import LazyLogMessage as L + + +scenario_name = 'ftrack.centralized-storage' + + +class ConfigureCentralizedStorageScenario(object): + '''Configure a centralized storage scenario.''' + + def __init__(self): + '''Instansiate centralized storage scenario.''' + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + + @property + def storage_scenario(self): + '''Return storage scenario setting.''' + return self.session.query( + 'select value from Setting ' + 'where name is "storage_scenario" and group is "STORAGE"' + ).one() + + @property + def existing_centralized_storage_configuration(self): + '''Return existing centralized storage configuration.''' + storage_scenario = self.storage_scenario + + try: + configuration = json.loads(storage_scenario['value']) + except (ValueError, TypeError): + return None + + if not isinstance(configuration, dict): + return None + + if configuration.get('scenario') != scenario_name: + return None + + return configuration.get('data', {}) + + def _get_confirmation_text(self, configuration): + '''Return confirmation text from *configuration*.''' + configure_location = configuration.get('configure_location') + select_location = configuration.get('select_location') + select_mount_point = configuration.get('select_mount_point') + + if configure_location: + location_text = unicode( + 'A new location will be created:\n\n' + '* Label: {location_label}\n' + '* Name: {location_name}\n' + '* Description: {location_description}\n' + ).format(**configure_location) + else: + location = self.session.get( + 'Location', select_location['location_id'] + ) + location_text = ( + u'You have choosen to use an existing location: {0}'.format( + location['label'] + ) + ) + + mount_points_text = unicode( + '* Linux: {linux}\n' + '* OS X: {osx}\n' + '* Windows: {windows}\n\n' + ).format( + linux=select_mount_point.get('linux_mount_point') or '*Not set*', + osx=select_mount_point.get('osx_mount_point') or '*Not set*', + windows=select_mount_point.get('windows_mount_point') or '*Not set*' + ) + + mount_points_not_set = [] + + if not select_mount_point.get('linux_mount_point'): + mount_points_not_set.append('Linux') + + if not select_mount_point.get('osx_mount_point'): + mount_points_not_set.append('OS X') + + if not 
select_mount_point.get('windows_mount_point'): + mount_points_not_set.append('Windows') + + if mount_points_not_set: + mount_points_text += unicode( + 'Please be aware that this location will not be working on ' + '{missing} because the mount points are not set up.' + ).format( + missing=' and '.join(mount_points_not_set) + ) + + text = unicode( + '#Confirm storage setup#\n\n' + 'Almost there! Please take a moment to verify the settings you ' + 'are about to save. You can always come back later and update the ' + 'configuration.\n' + '##Location##\n\n' + '{location}\n' + '##Mount points##\n\n' + '{mount_points}' + ).format( + location=location_text, + mount_points=mount_points_text + ) + + return text + + def configure_scenario(self, event): + '''Configure scenario based on *event* and return form items.''' + steps = ( + 'select_scenario', + 'select_location', + 'configure_location', + 'select_structure', + 'select_mount_point', + 'confirm_summary', + 'save_configuration' + ) + + warning_message = '' + values = event['data'].get('values', {}) + + # Calculate previous step and the next. + previous_step = values.get('step', 'select_scenario') + next_step = steps[steps.index(previous_step) + 1] + state = 'configuring' + + self.logger.info(L( + u'Configuring scenario, previous step: {0}, next step: {1}. ' + u'Values {2!r}.', + previous_step, next_step, values + )) + + if 'configuration' in values: + configuration = values.pop('configuration') + else: + configuration = {} + + if values: + # Update configuration with values from the previous step. + configuration[previous_step] = values + + if previous_step == 'select_location': + values = configuration['select_location'] + if values.get('location_id') != 'create_new_location': + location_exists = self.session.query( + 'Location where id is "{0}"'.format( + values.get('location_id') + ) + ).first() + if not location_exists: + next_step = 'select_location' + warning_message = ( + '**The selected location does not exist. 
Please choose ' + 'one from the dropdown or create a new one.**' + ) + + if next_step == 'select_location': + try: + location_id = ( + self.existing_centralized_storage_configuration['location_id'] + ) + except (KeyError, TypeError): + location_id = None + + options = [{ + 'label': 'Create new location', + 'value': 'create_new_location' + }] + for location in self.session.query( + 'select name, label, description from Location' + ): + if location['name'] not in ( + 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', + 'ftrack.server', 'ftrack.review' + ): + options.append({ + 'label': u'{label} ({name})'.format( + label=location['label'], name=location['name'] + ), + 'description': location['description'], + 'value': location['id'] + }) + + warning = '' + if location_id is not None: + # If there is already a location configured we must make the + # user aware that changing the location may be problematic. + warning = ( + '\n\n**Be careful if you switch to another location ' + 'for an existing storage scenario. Components that have ' + 'already been published to the previous location will be ' + 'made unavailable for common use.**' + ) + default_value = location_id + elif location_id is None and len(options) == 1: + # No location configured and no existing locations to use. + default_value = 'create_new_location' + else: + # There are existing locations to choose from but non of them + # are currently active in the centralized storage scenario. + default_value = None + + items = [{ + 'type': 'label', + 'value': ( + '#Select location#\n' + 'Choose an already existing location or create a new one ' + 'to represent your centralized storage. 
{0}'.format( + warning + ) + ) + }, { + 'type': 'enumerator', + 'label': 'Location', + 'name': 'location_id', + 'value': default_value, + 'data': options + }] + + default_location_name = 'studio.central-storage-location' + default_location_label = 'Studio location' + default_location_description = ( + 'The studio central location where all components are ' + 'stored.' + ) + + if previous_step == 'configure_location': + configure_location = configuration.get( + 'configure_location' + ) + + if configure_location: + try: + existing_location = self.session.query( + u'Location where name is "{0}"'.format( + configure_location.get('location_name') + ) + ).first() + except UnicodeEncodeError: + next_step = 'configure_location' + warning_message += ( + '**The location name contains non-ascii characters. ' + 'Please change the name and try again.**' + ) + values = configuration['select_location'] + else: + if existing_location: + next_step = 'configure_location' + warning_message += ( + u'**There is already a location named {0}. ' + u'Please change the name and try again.**'.format( + configure_location.get('location_name') + ) + ) + values = configuration['select_location'] + + if ( + not configure_location.get('location_name') or + not configure_location.get('location_label') or + not configure_location.get('location_description') + ): + next_step = 'configure_location' + warning_message += ( + '**Location name, label and description cannot ' + 'be empty.**' + ) + values = configuration['select_location'] + + if next_step == 'configure_location': + # Populate form with previous configuration. + default_location_label = configure_location['location_label'] + default_location_name = configure_location['location_name'] + default_location_description = ( + configure_location['location_description'] + ) + + if next_step == 'configure_location': + + if values.get('location_id') == 'create_new_location': + # Add options to create a new location. 
+ items = [{ + 'type': 'label', + 'value': ( + '#Create location#\n' + 'Here you will create a new location to be used ' + 'with your new Storage scenario. For your ' + 'convenience we have already filled in some default ' + 'values. If this is the first time you are configuring ' + 'a storage scenario in ftrack we recommend that you ' + 'stick with these settings.' + ) + }, { + 'label': 'Label', + 'name': 'location_label', + 'value': default_location_label, + 'type': 'text' + }, { + 'label': 'Name', + 'name': 'location_name', + 'value': default_location_name, + 'type': 'text' + }, { + 'label': 'Description', + 'name': 'location_description', + 'value': default_location_description, + 'type': 'text' + }] + + else: + # The user selected an existing location. Move on to next + # step. + next_step = 'select_mount_point' + + if next_step == 'select_structure': + # There is only one structure to choose from, go to next step. + next_step = 'select_mount_point' + # items = [ + # { + # 'type': 'label', + # 'value': ( + # '#Select structure#\n' + # 'Select which structure to use with your location. ' + # 'The structure is used to generate the filesystem ' + # 'path for components that are added to this location.' + # ) + # }, + # { + # 'type': 'enumerator', + # 'label': 'Structure', + # 'name': 'structure_id', + # 'value': 'standard', + # 'data': [{ + # 'label': 'Standard', + # 'value': 'standard', + # 'description': ( + # 'The Standard structure uses the names in your ' + # 'project structure to determine the path.' + # ) + # }] + # } + # ] + + if next_step == 'select_mount_point': + try: + mount_points = ( + self.existing_centralized_storage_configuration['accessor']['mount_points'] + ) + except (KeyError, TypeError): + mount_points = dict() + + items = [ + { + 'value': ( + '#Mount points#\n' + 'Set mount points for your centralized storage ' + 'location. 
For the location to work as expected each ' + 'platform that you intend to use must have the ' + 'corresponding mount point set and the storage must ' + 'be accessible. If not set correctly files will not be ' + 'saved or read.' + ), + 'type': 'label' + }, { + 'type': 'text', + 'label': 'Linux', + 'name': 'linux_mount_point', + 'empty_text': 'E.g. /usr/mnt/MyStorage ...', + 'value': mount_points.get('linux', '') + }, { + 'type': 'text', + 'label': 'OS X', + 'name': 'osx_mount_point', + 'empty_text': 'E.g. /Volumes/MyStorage ...', + 'value': mount_points.get('osx', '') + }, { + 'type': 'text', + 'label': 'Windows', + 'name': 'windows_mount_point', + 'empty_text': 'E.g. \\\\MyStorage ...', + 'value': mount_points.get('windows', '') + } + ] + + if next_step == 'confirm_summary': + items = [{ + 'type': 'label', + 'value': self._get_confirmation_text(configuration) + }] + state = 'confirm' + + if next_step == 'save_configuration': + mount_points = configuration['select_mount_point'] + select_location = configuration['select_location'] + + if select_location['location_id'] == 'create_new_location': + configure_location = configuration['configure_location'] + location = self.session.create( + 'Location', + { + 'name': configure_location['location_name'], + 'label': configure_location['location_label'], + 'description': ( + configure_location['location_description'] + ) + } + ) + + else: + location = self.session.query( + 'Location where id is "{0}"'.format( + select_location['location_id'] + ) + ).one() + + setting_value = json.dumps({ + 'scenario': scenario_name, + 'data': { + 'location_id': location['id'], + 'location_name': location['name'], + 'accessor': { + 'mount_points': { + 'linux': mount_points['linux_mount_point'], + 'osx': mount_points['osx_mount_point'], + 'windows': mount_points['windows_mount_point'] + } + } + } + }) + + self.storage_scenario['value'] = setting_value + self.session.commit() + + # Broadcast an event that storage scenario has been configured. 
+ event = ftrack_api.event.base.Event( + topic='ftrack.storage-scenario.configure-done' + ) + self.session.event_hub.publish(event) + + items = [{ + 'type': 'label', + 'value': ( + '#Done!#\n' + 'Your storage scenario is now configured and ready ' + 'to use. **Note that you may have to restart Connect and ' + 'other applications to start using it.**' + ) + }] + state = 'done' + + if warning_message: + items.insert(0, { + 'type': 'label', + 'value': warning_message + }) + + items.append({ + 'type': 'hidden', + 'value': configuration, + 'name': 'configuration' + }) + items.append({ + 'type': 'hidden', + 'value': next_step, + 'name': 'step' + }) + + return { + 'items': items, + 'state': state + } + + def discover_centralized_scenario(self, event): + '''Return action discover dictionary for *event*.''' + return { + 'id': scenario_name, + 'name': 'Centralized storage scenario', + 'description': ( + '(Recommended) centralized storage scenario where all files ' + 'are kept on a storage that is mounted and available to ' + 'everyone in the studio.' + ) + } + + def register(self, session): + '''Subscribe to events on *session*.''' + self.session = session + + #: TODO: Move these to a separate function. + session.event_hub.subscribe( + unicode( + 'topic=ftrack.storage-scenario.discover ' + 'and source.user.username="{0}"' + ).format( + session.api_user + ), + self.discover_centralized_scenario + ) + session.event_hub.subscribe( + unicode( + 'topic=ftrack.storage-scenario.configure ' + 'and data.scenario_id="{0}" ' + 'and source.user.username="{1}"' + ).format( + scenario_name, + session.api_user + ), + self.configure_scenario + ) + + +class ActivateCentralizedStorageScenario(object): + '''Activate a centralized storage scenario.''' + + def __init__(self): + '''Instansiate centralized storage scenario.''' + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + + def activate(self, event): + '''Activate scenario in *event*.''' + storage_scenario = event['data']['storage_scenario'] + + try: + location_data = storage_scenario['data'] + location_name = location_data['location_name'] + location_id = location_data['location_id'] + mount_points = location_data['accessor']['mount_points'] + + except KeyError: + error_message = ( + 'Unable to read storage scenario data.' + ) + self.logger.error(L(error_message)) + raise ftrack_api.exception.LocationError( + 'Unable to configure location based on scenario.' + ) + + else: + location = self.session.create( + 'Location', + data=dict( + name=location_name, + id=location_id + ), + reconstructing=True + ) + + if sys.platform == 'darwin': + prefix = mount_points['osx'] + elif sys.platform == 'linux2': + prefix = mount_points['linux'] + elif sys.platform == 'win32': + prefix = mount_points['windows'] + else: + raise ftrack_api.exception.LocationError( + ( + 'Unable to find accessor prefix for platform {0}.' + ).format(sys.platform) + ) + + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=prefix + ) + location.structure = _standard.StandardStructure() + location.priority = 1 + self.logger.info(L( + u'Storage scenario activated. Configured {0!r} from ' + u'{1!r}', + location, storage_scenario + )) + + def _verify_startup(self, event): + '''Verify the storage scenario configuration.''' + storage_scenario = event['data']['storage_scenario'] + location_data = storage_scenario['data'] + mount_points = location_data['accessor']['mount_points'] + + prefix = None + if sys.platform == 'darwin': + prefix = mount_points['osx'] + elif sys.platform == 'linux2': + prefix = mount_points['linux'] + elif sys.platform == 'win32': + prefix = mount_points['windows'] + + if not prefix: + return ( + u'The storage scenario has not been configured for your ' + u'operating system. ftrack may not be able to ' + u'store and track files correctly.' 
+ ) + + if not os.path.isdir(prefix): + return ( + unicode( + 'The path {0} does not exist. ftrack may not be able to ' + 'store and track files correctly. \n\nIf the storage is ' + 'newly setup you may want to create necessary folder ' + 'structures. If the storage is a network drive you should ' + 'make sure that it is mounted correctly.' + ).format(prefix) + ) + + def register(self, session): + '''Subscribe to events on *session*.''' + self.session = session + + session.event_hub.subscribe( + ( + 'topic=ftrack.storage-scenario.activate ' + 'and data.storage_scenario.scenario="{0}"'.format( + scenario_name + ) + ), + self.activate + ) + + # Listen to verify startup event from ftrack connect to allow responding + # with a message if something is not working correctly with this + # scenario that the user should be notified about. + self.session.event_hub.subscribe( + ( + 'topic=ftrack.connect.verify-startup ' + 'and data.storage_scenario.scenario="{0}"'.format( + scenario_name + ) + ), + self._verify_startup + ) + +def register(session): + '''Register storage scenario.''' + scenario = ActivateCentralizedStorageScenario() + scenario.register(session) + + +def register_configuration(session): + '''Register storage scenario.''' + scenario = ConfigureCentralizedStorageScenario() + scenario.register(session) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py new file mode 100644 index 00000000000..9f79a1850ce --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py @@ -0,0 +1,534 @@ +# pragma: no cover +# Module 'ntpath' -- common operations on WinNT/Win95 pathnames +"""Common pathname manipulations, WindowsNT/95 version. + +Instead of importing this module directly, import os and refer to this +module as os.path. 
+""" + +import os +import sys +import stat +import genericpath +import warnings + +from genericpath import * + +__all__ = ["normcase","isabs","join","splitdrive","split","splitext", + "basename","dirname","commonprefix","getsize","getmtime", + "getatime","getctime", "islink","exists","lexists","isdir","isfile", + "ismount","walk","expanduser","expandvars","normpath","abspath", + "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", + "extsep","devnull","realpath","supports_unicode_filenames","relpath"] + +# strings representing various path-related bits and pieces +curdir = '.' +pardir = '..' +extsep = '.' +sep = '\\' +pathsep = ';' +altsep = '/' +defpath = '.;C:\\bin' +if 'ce' in sys.builtin_module_names: + defpath = '\\Windows' +elif 'os2' in sys.builtin_module_names: + # OS/2 w/ VACPP + altsep = '/' +devnull = 'nul' + +# Normalize the case of a pathname and map slashes to backslashes. +# Other normalizations (such as optimizing '../' away) are not done +# (this is done by normpath). + +def normcase(s): + """Normalize case of pathname. + + Makes all characters lowercase and all slashes into backslashes.""" + return s.replace("/", "\\").lower() + + +# Return whether a path is absolute. +# Trivial in Posix, harder on the Mac or MS-DOS. +# For DOS it is absolute if it starts with a slash or backslash (current +# volume), or if a pathname after the volume letter and colon / UNC resource +# starts with a slash or backslash. + +def isabs(s): + """Test whether a path is absolute""" + s = splitdrive(s)[1] + return s != '' and s[:1] in '/\\' + + +# Join two (or more) paths. + +def join(a, *p): + """Join two or more pathname components, inserting "\\" as needed. + If any component is an absolute path, all previous path components + will be discarded.""" + path = a + for b in p: + b_wins = 0 # set to 1 iff b makes path irrelevant + if path == "": + b_wins = 1 + + elif isabs(b): + # This probably wipes out path so far. 
However, it's more + # complicated if path begins with a drive letter: + # 1. join('c:', '/a') == 'c:/a' + # 2. join('c:/', '/a') == 'c:/a' + # But + # 3. join('c:/a', '/b') == '/b' + # 4. join('c:', 'd:/') = 'd:/' + # 5. join('c:/', 'd:/') = 'd:/' + if path[1:2] != ":" or b[1:2] == ":": + # Path doesn't start with a drive letter, or cases 4 and 5. + b_wins = 1 + + # Else path has a drive letter, and b doesn't but is absolute. + elif len(path) > 3 or (len(path) == 3 and + path[-1] not in "/\\"): + # case 3 + b_wins = 1 + + if b_wins: + path = b + else: + # Join, and ensure there's a separator. + assert len(path) > 0 + if path[-1] in "/\\": + if b and b[0] in "/\\": + path += b[1:] + else: + path += b + elif path[-1] == ":": + path += b + elif b: + if b[0] in "/\\": + path += b + else: + path += "\\" + b + else: + # path is not empty and does not end with a backslash, + # but b is empty; since, e.g., split('a/') produces + # ('a', ''), it's best if join() adds a backslash in + # this case. + path += '\\' + + return path + + +# Split a path in a drive specification (a drive letter followed by a +# colon) and the path specification. +# It is always true that drivespec + pathspec == p +def splitdrive(p): + """Split a pathname into drive and path specifiers. Returns a 2-tuple +"(drive,path)"; either part may be empty""" + if p[1:2] == ':': + return p[0:2], p[2:] + return '', p + + +# Parse UNC paths +def splitunc(p): + """Split a pathname into UNC mount point and relative path specifiers. + + Return a 2-tuple (unc, rest); either part may be empty. + If unc is not empty, it has the form '//host/mount' (or similar + using backslashes). unc+rest is always the input path. + Paths containing drive letters never have an UNC part. + """ + if p[1:2] == ':': + return '', p # Drive letter present + firstTwo = p[0:2] + if firstTwo == '//' or firstTwo == '\\\\': + # is a UNC path: + # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter + # \\machine\mountpoint\directories... 
+ # directory ^^^^^^^^^^^^^^^ + normp = normcase(p) + index = normp.find('\\', 2) + if index == -1: + ##raise RuntimeError, 'illegal UNC path: "' + p + '"' + return ("", p) + index = normp.find('\\', index + 1) + if index == -1: + index = len(p) + return p[:index], p[index:] + return '', p + + +# Split a path in head (everything up to the last '/') and tail (the +# rest). After the trailing '/' is stripped, the invariant +# join(head, tail) == p holds. +# The resulting head won't end in '/' unless it is the root. + +def split(p): + """Split a pathname. + + Return tuple (head, tail) where tail is everything after the final slash. + Either part may be empty.""" + + d, p = splitdrive(p) + # set i to index beyond p's last slash + i = len(p) + while i and p[i-1] not in '/\\': + i = i - 1 + head, tail = p[:i], p[i:] # now tail has no slashes + # remove trailing slashes from head, unless it's all slashes + head2 = head + while head2 and head2[-1] in '/\\': + head2 = head2[:-1] + head = head2 or head + return d + head, tail + + +# Split a path in root and extension. +# The extension is everything starting at the last dot in the last +# pathname component; the root is everything before that. +# It is always true that root + ext == p. + +def splitext(p): + return genericpath._splitext(p, sep, altsep, extsep) +splitext.__doc__ = genericpath._splitext.__doc__ + + +# Return the tail (basename) part of a path. + +def basename(p): + """Returns the final component of a pathname""" + return split(p)[1] + + +# Return the head (dirname) part of a path. + +def dirname(p): + """Returns the directory component of a pathname""" + return split(p)[0] + +# Is a path a symbolic link? +# This will always return false on systems where posix.lstat doesn't exist. + +def islink(path): + """Test for symbolic link. + On WindowsNT/95 and OS/2 always returns false + """ + return False + +# alias exists to lexists +lexists = exists + +# Is a path a mount point? 
Either a root (with or without drive letter) +# or an UNC path with at most a / or \ after the mount point. + +def ismount(path): + """Test whether a path is a mount point (defined as root of drive)""" + unc, rest = splitunc(path) + if unc: + return rest in ("", "/", "\\") + p = splitdrive(path)[1] + return len(p) == 1 and p[0] in '/\\' + + +# Directory tree walk. +# For each directory under top (including top itself, but excluding +# '.' and '..'), func(arg, dirname, filenames) is called, where +# dirname is the name of the directory and filenames is the list +# of files (and subdirectories etc.) in the directory. +# The func may modify the filenames list, to implement a filter, +# or to impose a different order of visiting. + +def walk(top, func, arg): + """Directory tree walk with callback function. + + For each directory in the directory tree rooted at top (including top + itself, but excluding '.' and '..'), call func(arg, dirname, fnames). + dirname is the name of the directory, and fnames a list of the names of + the files and subdirectories in dirname (excluding '.' and '..'). func + may modify the fnames list in-place (e.g. via del or slice assignment), + and walk will only recurse into the subdirectories whose names remain in + fnames; this can be used to implement a filter, or to impose a specific + order of visiting. No semantics are defined for, or required of, arg, + beyond that arg is always passed to func. It can be used, e.g., to pass + a filename pattern, or a mutable object designed to accumulate + statistics. Passing None for arg is common.""" + warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", + stacklevel=2) + try: + names = os.listdir(top) + except os.error: + return + func(arg, top, names) + for name in names: + name = join(top, name) + if isdir(name): + walk(name, func, arg) + + +# Expand paths beginning with '~' or '~user'. +# '~' means $HOME; '~user' means that user's home directory. 
+# If the path doesn't begin with '~', or if the user or $HOME is unknown, +# the path is returned unchanged (leaving error reporting to whatever +# function is called with the expanded path as argument). +# See also module 'glob' for expansion of *, ? and [...] in pathnames. +# (A function should also be defined to do full *sh-style environment +# variable expansion.) + +def expanduser(path): + """Expand ~ and ~user constructs. + + If user or $HOME is unknown, do nothing.""" + if path[:1] != '~': + return path + i, n = 1, len(path) + while i < n and path[i] not in '/\\': + i = i + 1 + + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif not 'HOMEPATH' in os.environ: + return path + else: + try: + drive = os.environ['HOMEDRIVE'] + except KeyError: + drive = '' + userhome = join(drive, os.environ['HOMEPATH']) + + if i != 1: #~user + userhome = join(dirname(userhome), path[1:i]) + + return userhome + path[i:] + + +# Expand paths containing shell variable substitutions. +# The following rules apply: +# - no expansion within single quotes +# - '$$' is translated into '$' +# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% +# - ${varname} is accepted. +# - $varname is accepted. +# - %varname% is accepted. +# - varnames can be made out of letters, digits and the characters '_-' +# (though is not verified in the ${varname} and %varname% cases) +# XXX With COMMAND.COM you can use any characters in a variable name, +# XXX except '^|<>='. + +def expandvars(path): + """Expand shell variables of the forms $var, ${var} and %var%. 
+ + Unknown variables are left unchanged.""" + if '$' not in path and '%' not in path: + return path + import string + varchars = string.ascii_letters + string.digits + '_-' + res = '' + index = 0 + pathlen = len(path) + while index < pathlen: + c = path[index] + if c == '\'': # no expansion within single quotes + path = path[index + 1:] + pathlen = len(path) + try: + index = path.index('\'') + res = res + '\'' + path[:index + 1] + except ValueError: + res = res + path + index = pathlen - 1 + elif c == '%': # variable or '%' + if path[index + 1:index + 2] == '%': + res = res + c + index = index + 1 + else: + path = path[index+1:] + pathlen = len(path) + try: + index = path.index('%') + except ValueError: + res = res + '%' + path + index = pathlen - 1 + else: + var = path[:index] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '%' + var + '%' + elif c == '$': # variable or '$$' + if path[index + 1:index + 2] == '$': + res = res + c + index = index + 1 + elif path[index + 1:index + 2] == '{': + path = path[index+2:] + pathlen = len(path) + try: + index = path.index('}') + var = path[:index] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '${' + var + '}' + except ValueError: + res = res + '${' + path + index = pathlen - 1 + else: + var = '' + index = index + 1 + c = path[index:index + 1] + while c != '' and c in varchars: + var = var + c + index = index + 1 + c = path[index:index + 1] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '$' + var + if c != '': + index = index - 1 + else: + res = res + c + index = index + 1 + return res + + +# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. +# Previously, this function also truncated pathnames to 8+3 format, +# but as this module is called "ntpath", that's obviously wrong! 
+ +def normpath(path): + """Normalize path, eliminating double slashes, etc.""" + # Preserve unicode (if path is unicode) + backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') + if path.startswith(('\\\\.\\', '\\\\?\\')): + # in the case of paths with these prefixes: + # \\.\ -> device names + # \\?\ -> literal paths + # do not do any normalization, but return the path unchanged + return path + path = path.replace("/", "\\") + prefix, path = splitdrive(path) + # We need to be careful here. If the prefix is empty, and the path starts + # with a backslash, it could either be an absolute path on the current + # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It + # is therefore imperative NOT to collapse multiple backslashes blindly in + # that case. + # The code below preserves multiple backslashes when there is no drive + # letter. This means that the invalid filename \\\a\b is preserved + # unchanged, where a\\\b is normalised to a\b. It's not clear that there + # is any better behaviour for such edge cases. + if prefix == '': + # No drive letter - preserve initial backslashes + while path[:1] == "\\": + prefix = prefix + backslash + path = path[1:] + else: + # We have a drive letter - collapse initial backslashes + if path.startswith("\\"): + prefix = prefix + backslash + path = path.lstrip("\\") + comps = path.split("\\") + i = 0 + while i < len(comps): + if comps[i] in ('.', ''): + del comps[i] + elif comps[i] == '..': + if i > 0 and comps[i-1] != '..': + del comps[i-1:i+1] + i -= 1 + elif i == 0 and prefix.endswith("\\"): + del comps[i] + else: + i += 1 + else: + i += 1 + # If the path is now empty, substitute '.' + if not prefix and not comps: + comps.append(dot) + return prefix + backslash.join(comps) + + +# Return an absolute path. 
+try: + from nt import _getfullpathname + +except ImportError: # not running on Windows - mock up something sensible + def abspath(path): + """Return the absolute version of a path.""" + if not isabs(path): + if isinstance(path, unicode): + cwd = os.getcwdu() + else: + cwd = os.getcwd() + path = join(cwd, path) + return normpath(path) + +else: # use native Windows method on Windows + def abspath(path): + """Return the absolute version of a path.""" + + if path: # Empty path must return current working directory. + try: + path = _getfullpathname(path) + except WindowsError: + pass # Bad path - return unchanged. + elif isinstance(path, unicode): + path = os.getcwdu() + else: + path = os.getcwd() + return normpath(path) + +# realpath is a no-op on systems without islink support +realpath = abspath +# Win9x family and earlier have no Unicode filename support. +supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and + sys.getwindowsversion()[3] >= 2) + +def _abspath_split(path): + abs = abspath(normpath(path)) + prefix, rest = splitunc(abs) + is_unc = bool(prefix) + if not is_unc: + prefix, rest = splitdrive(abs) + return is_unc, prefix, [x for x in rest.split(sep) if x] + +def relpath(path, start=curdir): + """Return a relative version of a path""" + + if not path: + raise ValueError("no path specified") + + start_is_unc, start_prefix, start_list = _abspath_split(start) + path_is_unc, path_prefix, path_list = _abspath_split(path) + + if path_is_unc ^ start_is_unc: + raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" + % (path, start)) + if path_prefix.lower() != start_prefix.lower(): + if path_is_unc: + raise ValueError("path is on UNC root %s, start on UNC root %s" + % (path_prefix, start_prefix)) + else: + raise ValueError("path is on drive %s, start on drive %s" + % (path_prefix, start_prefix)) + # Work out how much of the filepath is shared by start and path. 
+ i = 0 + for e1, e2 in zip(start_list, path_list): + if e1.lower() != e2.lower(): + break + i += 1 + + rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + if not rel_list: + return curdir + return join(*rel_list) + +try: + # The genericpath.isdir implementation uses os.stat and checks the mode + # attribute to tell whether or not the path is a directory. + # This is overkill on Windows - just pass the path to GetFileAttributes + # and check the attribute from there. + from nt import _isdir as isdir +except ImportError: + # Use genericpath.isdir as imported above. + pass diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py new file mode 100644 index 00000000000..aa1a8c4aba7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py @@ -0,0 +1 @@ +__version__ = '1.8.2' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py new file mode 100644 index 00000000000..69cc6f4b4f5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py @@ -0,0 +1,66 @@ +""" +Yet another backport of WeakMethod for Python 2.7. +Changes include removing exception chaining and adding args to super() calls. + +Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. + +Full license available in LICENSE.python. +""" +from weakref import ref + + +class WeakMethod(ref): + """ + A custom `weakref.ref` subclass which simulates a weak reference to + a bound method, working around the lifetime problem of bound methods. 
+ """ + + __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" + + def __new__(cls, meth, callback=None): + try: + obj = meth.__self__ + func = meth.__func__ + except AttributeError: + raise TypeError( + "argument should be a bound method, not {}".format(type(meth)) + ) + + def _cb(arg): + # The self-weakref trick is needed to avoid creating a reference + # cycle. + self = self_wr() + if self._alive: + self._alive = False + if callback is not None: + callback(self) + + self = ref.__new__(cls, obj, _cb) + self._func_ref = ref(func, _cb) + self._meth_type = type(meth) + self._alive = True + self_wr = ref(self) + return self + + def __call__(self): + obj = super(WeakMethod, self).__call__() + func = self._func_ref() + if obj is None or func is None: + return None + return self._meth_type(func, obj) + + def __eq__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is other + return ref.__eq__(self, other) and self._func_ref == other._func_ref + return NotImplemented + + def __ne__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is not other + return ref.__ne__(self, other) or self._func_ref != other._func_ref + return NotImplemented + + __hash__ = ref.__hash__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py new file mode 100644 index 00000000000..1aab07ed77a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py new file mode 100644 index 00000000000..6aa9cf0281d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py @@ -0,0 +1,124 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import abc + +import ftrack_api.exception + + +class Accessor(object): + '''Provide data access to a location. + + A location represents a specific storage, but access to that storage may + vary. For example, both local filesystem and FTP access may be possible for + the same storage. An accessor implements these different ways of accessing + the same data location. + + As different accessors may access the same location, only part of a data + path that is commonly understood may be stored in the database. The format + of this path should be a contract between the accessors that require access + to the same location and is left as an implementation detail. As such, this + system provides no guarantee that two different accessors can provide access + to the same location, though this is a clear goal. The path stored centrally + is referred to as the **resource identifier** and should be used when + calling any of the accessor methods that accept a *resource_identifier* + argument. + + ''' + + __metaclass__ = abc.ABCMeta + + def __init__(self): + '''Initialise location accessor.''' + super(Accessor, self).__init__() + + @abc.abstractmethod + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container. + + Each entry in the returned list should be a valid resource identifier. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist or + :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if + *resource_identifier* is not a container. 
+ + ''' + + @abc.abstractmethod + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + + @abc.abstractmethod + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + + @abc.abstractmethod + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + + @abc.abstractmethod + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + + @abc.abstractmethod + def open(self, resource_identifier, mode='rb'): + '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' + + @abc.abstractmethod + def remove(self, resource_identifier): + '''Remove *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist. + + ''' + + @abc.abstractmethod + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*. + + If *recursive* is True, also make any intermediate containers. + + Should silently ignore existing containers and not recreate them. + + ''' + + @abc.abstractmethod + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` + if container of *resource_identifier* could not be determined. + + ''' + + def remove_container(self, resource_identifier): # pragma: no cover + '''Remove container at *resource_identifier*.''' + return self.remove(resource_identifier) + + def get_filesystem_path(self, resource_identifier): # pragma: no cover + '''Return filesystem path for *resource_identifier*. 
+ + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + filesystem path could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving filesystem paths is not supported by this accessor. + + ''' + raise ftrack_api.exception.AccessorUnsupportedOperationError( + 'get_filesystem_path', resource_identifier=resource_identifier + ) + + def get_url(self, resource_identifier): + '''Return URL for *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by this accessor. + + ''' + raise ftrack_api.exception.AccessorUnsupportedOperationError( + 'get_url', resource_identifier=resource_identifier + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py new file mode 100644 index 00000000000..65769603f65 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py @@ -0,0 +1,250 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import os +import sys +import errno +import contextlib + +import ftrack_api._python_ntpath as ntpath +import ftrack_api.accessor.base +import ftrack_api.data +from ftrack_api.exception import ( + AccessorFilesystemPathError, + AccessorUnsupportedOperationError, + AccessorResourceNotFoundError, + AccessorOperationFailedError, + AccessorPermissionDeniedError, + AccessorResourceInvalidError, + AccessorContainerNotEmptyError, + AccessorParentResourceNotFoundError +) + + +class DiskAccessor(ftrack_api.accessor.base.Accessor): + '''Provide disk access to a location. 
+ + Expect resource identifiers to refer to relative filesystem paths. + + ''' + + def __init__(self, prefix, **kw): + '''Initialise location accessor. + + *prefix* specifies the base folder for the disk based structure and + will be prepended to any path. It should be specified in the syntax of + the current OS. + + ''' + if prefix: + prefix = os.path.expanduser(os.path.expandvars(prefix)) + prefix = os.path.abspath(prefix) + self.prefix = prefix + + super(DiskAccessor, self).__init__(**kw) + + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container. + + Each entry in the returned list should be a valid resource identifier. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist or + :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if + *resource_identifier* is not a container. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='list', resource_identifier=resource_identifier + ): + listing = [] + for entry in os.listdir(filesystem_path): + listing.append(os.path.join(resource_identifier, entry)) + + return listing + + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.exists(filesystem_path) + + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.isfile(filesystem_path) + + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.isdir(filesystem_path) + + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + raise 
AccessorUnsupportedOperationError(operation='is_sequence') + + def open(self, resource_identifier, mode='rb'): + '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='open', resource_identifier=resource_identifier + ): + data = ftrack_api.data.File(filesystem_path, mode) + + return data + + def remove(self, resource_identifier): + '''Remove *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + if self.is_file(resource_identifier): + with error_handler( + operation='remove', resource_identifier=resource_identifier + ): + os.remove(filesystem_path) + + elif self.is_container(resource_identifier): + with error_handler( + operation='remove', resource_identifier=resource_identifier + ): + os.rmdir(filesystem_path) + + else: + raise AccessorResourceNotFoundError( + resource_identifier=resource_identifier + ) + + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*. + + If *recursive* is True, also make any intermediate containers. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='makeContainer', resource_identifier=resource_identifier + ): + try: + if recursive: + os.makedirs(filesystem_path) + else: + try: + os.mkdir(filesystem_path) + except OSError as error: + if error.errno == errno.ENOENT: + raise AccessorParentResourceNotFoundError( + resource_identifier=resource_identifier + ) + else: + raise + + except OSError, error: + if error.errno != errno.EEXIST: + raise + + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*. 
+ + Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if + container of *resource_identifier* could not be determined. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + container = os.path.dirname(filesystem_path) + + if self.prefix: + if not container.startswith(self.prefix): + raise AccessorParentResourceNotFoundError( + resource_identifier=resource_identifier, + message='Could not determine container for ' + '{resource_identifier} as container falls outside ' + 'of configured prefix.' + ) + + # Convert container filesystem path into resource identifier. + container = container[len(self.prefix):] + if ntpath.isabs(container): + # Ensure that resulting path is relative by stripping any + # leftover prefixed slashes from string. + # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the + # result will be 'foo/bar'. + container = container.lstrip('\\/') + + return container + + def get_filesystem_path(self, resource_identifier): + '''Return filesystem path for *resource_identifier*. + + For example:: + + >>> accessor = DiskAccessor('my.location', '/mountpoint') + >>> print accessor.get_filesystem_path('test.txt') + /mountpoint/test.txt + >>> print accessor.get_filesystem_path('/mountpoint/test.txt') + /mountpoint/test.txt + + Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem + path could not be determined from *resource_identifier*. + + ''' + filesystem_path = resource_identifier + if filesystem_path: + filesystem_path = os.path.normpath(filesystem_path) + + if self.prefix: + if not os.path.isabs(filesystem_path): + filesystem_path = os.path.normpath( + os.path.join(self.prefix, filesystem_path) + ) + + if not filesystem_path.startswith(self.prefix): + raise AccessorFilesystemPathError( + resource_identifier=resource_identifier, + message='Could not determine access path for ' + 'resource_identifier outside of configured prefix: ' + '{resource_identifier}.' 
+ ) + + return filesystem_path + + +@contextlib.contextmanager +def error_handler(**kw): + '''Conform raised OSError/IOError exception to appropriate FTrack error.''' + try: + yield + + except (OSError, IOError) as error: + (exception_type, exception_value, traceback) = sys.exc_info() + kw.setdefault('error', error) + + error_code = getattr(error, 'errno') + if not error_code: + raise AccessorOperationFailedError(**kw), None, traceback + + if error_code == errno.ENOENT: + raise AccessorResourceNotFoundError(**kw), None, traceback + + elif error_code == errno.EPERM: + raise AccessorPermissionDeniedError(**kw), None, traceback + + elif error_code == errno.ENOTEMPTY: + raise AccessorContainerNotEmptyError(**kw), None, traceback + + elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): + raise AccessorResourceInvalidError(**kw), None, traceback + + else: + raise AccessorOperationFailedError(**kw), None, traceback + + except Exception: + raise diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py new file mode 100644 index 00000000000..9c735084d5c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py @@ -0,0 +1,240 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import hashlib +import base64 +import json + +import requests + +from .base import Accessor +from ..data import String +import ftrack_api.exception +import ftrack_api.symbol + + +class ServerFile(String): + '''Representation of a server file.''' + + def __init__(self, resource_identifier, session, mode='rb'): + '''Initialise file.''' + self.mode = mode + self.resource_identifier = resource_identifier + self._session = session + self._has_read = False + + super(ServerFile, self).__init__() + + def flush(self): + '''Flush 
all changes.''' + super(ServerFile, self).flush() + + if self.mode == 'wb': + self._write() + + def read(self, limit=None): + '''Read file.''' + if not self._has_read: + self._read() + self._has_read = True + + return super(ServerFile, self).read(limit) + + def _read(self): + '''Read all remote content from key into wrapped_file.''' + position = self.tell() + self.seek(0) + + response = requests.get( + '{0}/component/get'.format(self._session.server_url), + params={ + 'id': self.resource_identifier, + 'username': self._session.api_user, + 'apiKey': self._session.api_key + }, + stream=True + ) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to read data: {0}.'.format(error) + ) + + for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): + self.wrapped_file.write(block) + + self.flush() + self.seek(position) + + def _write(self): + '''Write current data to remote key.''' + position = self.tell() + self.seek(0) + + # Retrieve component from cache to construct a filename. + component = self._session.get('FileComponent', self.resource_identifier) + if not component: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Unable to retrieve component with id: {0}.'.format( + self.resource_identifier + ) + ) + + # Construct a name from component name and file_type. + name = component['name'] + if component['file_type']: + name = u'{0}.{1}'.format( + name, + component['file_type'].lstrip('.') + ) + + try: + metadata = self._session.get_upload_metadata( + component_id=self.resource_identifier, + file_name=name, + file_size=self._get_size(), + checksum=self._compute_checksum() + ) + except Exception as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to get put metadata: {0}.'.format(error) + ) + + # Ensure at beginning of file before put. + self.seek(0) + + # Put the file based on the metadata. 
+ response = requests.put( + metadata['url'], + data=self.wrapped_file, + headers=metadata['headers'] + ) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to put file to server: {0}.'.format(error) + ) + + self.seek(position) + + def _get_size(self): + '''Return size of file in bytes.''' + position = self.tell() + self.seek(0, os.SEEK_END) + length = self.tell() + self.seek(position) + return length + + def _compute_checksum(self): + '''Return checksum for file.''' + fp = self.wrapped_file + buf_size = ftrack_api.symbol.CHUNK_SIZE + hash_obj = hashlib.md5() + spos = fp.tell() + + s = fp.read(buf_size) + while s: + hash_obj.update(s) + s = fp.read(buf_size) + + base64_digest = base64.encodestring(hash_obj.digest()) + if base64_digest[-1] == '\n': + base64_digest = base64_digest[0:-1] + + fp.seek(spos) + return base64_digest + + +class _ServerAccessor(Accessor): + '''Provide server location access.''' + + def __init__(self, session, **kw): + '''Initialise location accessor.''' + super(_ServerAccessor, self).__init__(**kw) + + self._session = session + + def open(self, resource_identifier, mode='rb'): + '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' + return ServerFile(resource_identifier, session=self._session, mode=mode) + + def remove(self, resourceIdentifier): + '''Remove *resourceIdentifier*.''' + response = requests.get( + '{0}/component/remove'.format(self._session.server_url), + params={ + 'id': resourceIdentifier, + 'username': self._session.api_user, + 'apiKey': self._session.api_key + } + ) + if response.status_code != 200: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to remove file.' 
+ ) + + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*.''' + return None + + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*.''' + + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container.''' + raise NotImplementedError() + + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + return False + + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + raise NotImplementedError() + + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + raise NotImplementedError() + + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + raise NotImplementedError() + + def get_url(self, resource_identifier): + '''Return url for *resource_identifier*.''' + url_string = ( + u'{url}/component/get?id={id}&username={username}' + u'&apiKey={apiKey}' + ) + return url_string.format( + url=self._session.server_url, + id=resource_identifier, + username=self._session.api_user, + apiKey=self._session.api_key + ) + + def get_thumbnail_url(self, resource_identifier, size=None): + '''Return thumbnail url for *resource_identifier*. + + Optionally, specify *size* to constrain the downscaled image to size + x size pixels. 
+ ''' + url_string = ( + u'{url}/component/thumbnail?id={id}&username={username}' + u'&apiKey={apiKey}' + ) + url = url_string.format( + url=self._session.server_url, + id=resource_identifier, + username=self._session.api_user, + apiKey=self._session.api_key + ) + if size: + url += u'&size={0}'.format(size) + + return url diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py new file mode 100644 index 00000000000..719b612f394 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py @@ -0,0 +1,707 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import collections +import copy +import logging +import functools + +import ftrack_api.symbol +import ftrack_api.exception +import ftrack_api.collection +import ftrack_api.inspection +import ftrack_api.operation + +logger = logging.getLogger( + __name__ +) + + +def merge_references(function): + '''Decorator to handle merging of references / collections.''' + + @functools.wraps(function) + def get_value(attribute, entity): + '''Merge the attribute with the local cache.''' + + if attribute.name not in entity._inflated: + # Only merge on first access to avoid + # inflating them multiple times. + + logger.debug( + 'Merging potential new data into attached ' + 'entity for attribute {0}.'.format( + attribute.name + ) + ) + + # Local attributes. 
+ local_value = attribute.get_local_value(entity) + if isinstance( + local_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + logger.debug( + 'Merging local value for attribute {0}.'.format(attribute) + ) + + merged_local_value = entity.session._merge( + local_value, merged=dict() + ) + + if merged_local_value is not local_value: + with entity.session.operation_recording(False): + attribute.set_local_value(entity, merged_local_value) + + # Remote attributes. + remote_value = attribute.get_remote_value(entity) + if isinstance( + remote_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + logger.debug( + 'Merging remote value for attribute {0}.'.format(attribute) + ) + + merged_remote_value = entity.session._merge( + remote_value, merged=dict() + ) + + if merged_remote_value is not remote_value: + attribute.set_remote_value(entity, merged_remote_value) + + entity._inflated.add( + attribute.name + ) + + return function( + attribute, entity + ) + + return get_value + + +class Attributes(object): + '''Collection of properties accessible by name.''' + + def __init__(self, attributes=None): + super(Attributes, self).__init__() + self._data = dict() + if attributes is not None: + for attribute in attributes: + self.add(attribute) + + def add(self, attribute): + '''Add *attribute*.''' + existing = self._data.get(attribute.name, None) + if existing: + raise ftrack_api.exception.NotUniqueError( + 'Attribute with name {0} already added as {1}' + .format(attribute.name, existing) + ) + + self._data[attribute.name] = attribute + + def remove(self, attribute): + '''Remove attribute.''' + self._data.pop(attribute.name) + + def get(self, name): + '''Return attribute by *name*. + + If no attribute matches *name* then return None. 
+ + ''' + return self._data.get(name, None) + + def keys(self): + '''Return list of attribute names.''' + return self._data.keys() + + def __contains__(self, item): + '''Return whether *item* present.''' + if not isinstance(item, Attribute): + return False + + return item.name in self._data + + def __iter__(self): + '''Return iterator over attributes.''' + return self._data.itervalues() + + def __len__(self): + '''Return count of attributes.''' + return len(self._data) + + +class Attribute(object): + '''A name and value pair persisted remotely.''' + + def __init__( + self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, + computed=False + ): + '''Initialise attribute with *name*. + + *default_value* represents the default value for the attribute. It may + be a callable. It is not used within the attribute when providing + values, but instead exists for other parts of the system to reference. + + If *mutable* is set to False then the local value of the attribute on an + entity can only be set when both the existing local and remote values + are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the + target value is also :attr:`ftrack_api.symbol.NOT_SET`. + + If *computed* is set to True the value is a remote side computed value + and should not be long-term cached. 
+ + ''' + super(Attribute, self).__init__() + self._name = name + self._mutable = mutable + self._computed = computed + self.default_value = default_value + + self._local_key = 'local' + self._remote_key = 'remote' + + def __repr__(self): + '''Return representation of entity.''' + return '<{0}.{1}({2}) object at {3}>'.format( + self.__module__, + self.__class__.__name__, + self.name, + id(self) + ) + + def get_entity_storage(self, entity): + '''Return attribute storage on *entity* creating if missing.''' + storage_key = '_ftrack_attribute_storage' + storage = getattr(entity, storage_key, None) + if storage is None: + storage = collections.defaultdict( + lambda: + { + self._local_key: ftrack_api.symbol.NOT_SET, + self._remote_key: ftrack_api.symbol.NOT_SET + } + ) + setattr(entity, storage_key, storage) + + return storage + + @property + def name(self): + '''Return name.''' + return self._name + + @property + def mutable(self): + '''Return whether attribute is mutable.''' + return self._mutable + + @property + def computed(self): + '''Return whether attribute is computed.''' + return self._computed + + def get_value(self, entity): + '''Return current value for *entity*. + + If a value was set locally then return it, otherwise return last known + remote value. If no remote value yet retrieved, make a request for it + via the session and block until available. + + ''' + value = self.get_local_value(entity) + if value is not ftrack_api.symbol.NOT_SET: + return value + + value = self.get_remote_value(entity) + if value is not ftrack_api.symbol.NOT_SET: + return value + + if not entity.session.auto_populate: + return value + + self.populate_remote_value(entity) + return self.get_remote_value(entity) + + def get_local_value(self, entity): + '''Return locally set value for *entity*.''' + storage = self.get_entity_storage(entity) + return storage[self.name][self._local_key] + + def get_remote_value(self, entity): + '''Return remote value for *entity*. + + .. 
note:: + + Only return locally stored remote value, do not fetch from remote. + + ''' + storage = self.get_entity_storage(entity) + return storage[self.name][self._remote_key] + + def set_local_value(self, entity, value): + '''Set local *value* for *entity*.''' + if ( + not self.mutable + and self.is_set(entity) + and value is not ftrack_api.symbol.NOT_SET + ): + raise ftrack_api.exception.ImmutableAttributeError(self) + + old_value = self.get_local_value(entity) + + storage = self.get_entity_storage(entity) + storage[self.name][self._local_key] = value + + # Record operation. + if entity.session.record_operations: + entity.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + entity.entity_type, + ftrack_api.inspection.primary_key(entity), + self.name, + old_value, + value + ) + ) + + def set_remote_value(self, entity, value): + '''Set remote *value*. + + .. note:: + + Only set locally stored remote value, do not persist to remote. + + ''' + storage = self.get_entity_storage(entity) + storage[self.name][self._remote_key] = value + + def populate_remote_value(self, entity): + '''Populate remote value for *entity*.''' + entity.session.populate([entity], self.name) + + def is_modified(self, entity): + '''Return whether local value set and differs from remote. + + .. note:: + + Will not fetch remote value so may report True even when values + are the same on the remote. 
+ + ''' + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + return ( + local_value is not ftrack_api.symbol.NOT_SET + and local_value != remote_value + ) + + def is_set(self, entity): + '''Return whether a value is set for *entity*.''' + return any([ + self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, + self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET + ]) + + +class ScalarAttribute(Attribute): + '''Represent a scalar value.''' + + def __init__(self, name, data_type, **kw): + '''Initialise property.''' + super(ScalarAttribute, self).__init__(name, **kw) + self.data_type = data_type + + +class ReferenceAttribute(Attribute): + '''Reference another entity.''' + + def __init__(self, name, entity_type, **kw): + '''Initialise property.''' + super(ReferenceAttribute, self).__init__(name, **kw) + self.entity_type = entity_type + + def populate_remote_value(self, entity): + '''Populate remote value for *entity*. + + As attribute references another entity, use that entity's configured + default projections to auto populate useful attributes when loading. + + ''' + reference_entity_type = entity.session.types[self.entity_type] + default_projections = reference_entity_type.default_projections + + projections = [] + if default_projections: + for projection in default_projections: + projections.append('{0}.{1}'.format(self.name, projection)) + else: + projections.append(self.name) + + entity.session.populate([entity], ', '.join(projections)) + + def is_modified(self, entity): + '''Return whether a local value has been set and differs from remote. + + .. note:: + + Will not fetch remote value so may report True even when values + are the same on the remote. 
+ + ''' + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + + if local_value is ftrack_api.symbol.NOT_SET: + return False + + if remote_value is ftrack_api.symbol.NOT_SET: + return True + + if ( + ftrack_api.inspection.identity(local_value) + != ftrack_api.inspection.identity(remote_value) + ): + return True + + return False + + + @merge_references + def get_value(self, entity): + return super(ReferenceAttribute, self).get_value( + entity + ) + +class AbstractCollectionAttribute(Attribute): + '''Base class for collection attributes.''' + + #: Collection class used by attribute. + collection_class = None + + @merge_references + def get_value(self, entity): + '''Return current value for *entity*. + + If a value was set locally then return it, otherwise return last known + remote value. If no remote value yet retrieved, make a request for it + via the session and block until available. + + .. note:: + + As value is a collection that is mutable, will transfer a remote + value into the local value on access if no local value currently + set. + + ''' + super(AbstractCollectionAttribute, self).get_value(entity) + + # Conditionally, copy remote value into local value so that it can be + # mutated without side effects. + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + if ( + local_value is ftrack_api.symbol.NOT_SET + and isinstance(remote_value, self.collection_class) + ): + try: + with entity.session.operation_recording(False): + self.set_local_value(entity, copy.copy(remote_value)) + except ftrack_api.exception.ImmutableAttributeError: + pass + + value = self.get_local_value(entity) + + # If the local value is still not set then attempt to set it with a + # suitable placeholder collection so that the caller can interact with + # the collection using its normal interface. This is required for a + # newly created entity for example. 
It *could* be done as a simple + # default value, but that would incur cost for every collection even + # when they are not modified before commit. + if value is ftrack_api.symbol.NOT_SET: + try: + with entity.session.operation_recording(False): + self.set_local_value( + entity, + # None should be treated as empty collection. + None + ) + except ftrack_api.exception.ImmutableAttributeError: + pass + + return self.get_local_value(entity) + + def set_local_value(self, entity, value): + '''Set local *value* for *entity*.''' + if value is not ftrack_api.symbol.NOT_SET: + value = self._adapt_to_collection(entity, value) + value.mutable = self.mutable + + super(AbstractCollectionAttribute, self).set_local_value(entity, value) + + def set_remote_value(self, entity, value): + '''Set remote *value*. + + .. note:: + + Only set locally stored remote value, do not persist to remote. + + ''' + if value is not ftrack_api.symbol.NOT_SET: + value = self._adapt_to_collection(entity, value) + value.mutable = False + + super(AbstractCollectionAttribute, self).set_remote_value(entity, value) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to appropriate collection instance for *entity*. + + .. note:: + + If *value* is None then return a suitable empty collection. + + ''' + raise NotImplementedError() + + +class CollectionAttribute(AbstractCollectionAttribute): + '''Represent a collection of other entities.''' + + #: Collection class used by attribute. 
+ collection_class = ftrack_api.collection.Collection + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to a Collection instance on *entity*.''' + + if not isinstance(value, ftrack_api.collection.Collection): + + if value is None: + value = ftrack_api.collection.Collection(entity, self) + + elif isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute' + ) + + return value + + +class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): + '''Represent a mapped key, value collection of entities.''' + + #: Collection class used by attribute. + collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy + + def __init__( + self, name, creator, key_attribute, value_attribute, **kw + ): + '''Initialise attribute with *name*. + + *creator* should be a function that accepts a dictionary of data and + is used by the referenced collection to create new entities in the + collection. + + *key_attribute* should be the name of the attribute on an entity in + the collection that represents the value for 'key' of the dictionary. + + *value_attribute* should be the name of the attribute on an entity in + the collection that represents the value for 'value' of the dictionary. 
+ + ''' + self.creator = creator + self.key_attribute = key_attribute + self.value_attribute = value_attribute + + super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to an *entity*.''' + if not isinstance( + value, ftrack_api.collection.KeyValueMappedCollectionProxy + ): + + if value is None: + value = ftrack_api.collection.KeyValueMappedCollectionProxy( + ftrack_api.collection.Collection(entity, self), + self.creator, self.key_attribute, + self.value_attribute + ) + + elif isinstance(value, (list, ftrack_api.collection.Collection)): + + if isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + value = ftrack_api.collection.KeyValueMappedCollectionProxy( + value, self.creator, self.key_attribute, + self.value_attribute + ) + + elif isinstance(value, collections.Mapping): + # Convert mapping. + # TODO: When backend model improves, revisit this logic. + # First get existing value and delete all references. This is + # needed because otherwise they will not be automatically + # removed server side. + # The following should not cause recursion as the internal + # values should be mapped collections already. + current_value = self.get_value(entity) + if not isinstance( + current_value, + ftrack_api.collection.KeyValueMappedCollectionProxy + ): + raise NotImplementedError( + 'Cannot adapt mapping to collection as current value ' + 'type is not a KeyValueMappedCollectionProxy.' + ) + + # Create the new collection using the existing collection as + # basis. Then update through proxy interface to ensure all + # internal operations called consistently (such as entity + # deletion for key removal). 
+ collection = ftrack_api.collection.Collection( + entity, self, data=current_value.collection[:] + ) + collection_proxy = ( + ftrack_api.collection.KeyValueMappedCollectionProxy( + collection, self.creator, + self.key_attribute, self.value_attribute + ) + ) + + # Remove expired keys from collection. + expired_keys = set(current_value.keys()) - set(value.keys()) + for key in expired_keys: + del collection_proxy[key] + + # Set new values for existing keys / add new keys. + for key, value in value.items(): + collection_proxy[key] = value + + value = collection_proxy + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute.' + ) + + return value + + +class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): + '''Represent a mapped custom attribute collection of entities.''' + + #: Collection class used by attribute. + collection_class = ( + ftrack_api.collection.CustomAttributeCollectionProxy + ) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to an *entity*.''' + if not isinstance( + value, ftrack_api.collection.CustomAttributeCollectionProxy + ): + + if value is None: + value = ftrack_api.collection.CustomAttributeCollectionProxy( + ftrack_api.collection.Collection(entity, self) + ) + + elif isinstance(value, (list, ftrack_api.collection.Collection)): + + # Why are we creating a new if it is a list? This will cause + # any merge to create a new proxy and collection. + if isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + value = ftrack_api.collection.CustomAttributeCollectionProxy( + value + ) + + elif isinstance(value, collections.Mapping): + # Convert mapping. + # TODO: When backend model improves, revisit this logic. + # First get existing value and delete all references. 
This is + # needed because otherwise they will not be automatically + # removed server side. + # The following should not cause recursion as the internal + # values should be mapped collections already. + current_value = self.get_value(entity) + if not isinstance( + current_value, + ftrack_api.collection.CustomAttributeCollectionProxy + ): + raise NotImplementedError( + 'Cannot adapt mapping to collection as current value ' + 'type is not a MappedCollectionProxy.' + ) + + # Create the new collection using the existing collection as + # basis. Then update through proxy interface to ensure all + # internal operations called consistently (such as entity + # deletion for key removal). + collection = ftrack_api.collection.Collection( + entity, self, data=current_value.collection[:] + ) + collection_proxy = ( + ftrack_api.collection.CustomAttributeCollectionProxy( + collection + ) + ) + + # Remove expired keys from collection. + expired_keys = set(current_value.keys()) - set(value.keys()) + for key in expired_keys: + del collection_proxy[key] + + # Set new values for existing keys / add new keys. + for key, value in value.items(): + collection_proxy[key] = value + + value = collection_proxy + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute.' + ) + + return value diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py new file mode 100644 index 00000000000..49456dc2d79 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py @@ -0,0 +1,579 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +'''Caching framework. 
+ +Defines a standardised :class:`Cache` interface for storing data against +specific keys. Key generation is also standardised using a :class:`KeyMaker` +interface. + +Combining a Cache and KeyMaker allows for memoisation of function calls with +respect to the arguments used by using a :class:`Memoiser`. + +As a convenience a simple :func:`memoise` decorator is included for quick +memoisation of function using a global cache and standard key maker. + +''' + +import collections +import functools +import abc +import copy +import inspect +import re +import anydbm +import contextlib +try: + import cPickle as pickle +except ImportError: # pragma: no cover + import pickle + +import ftrack_api.inspection +import ftrack_api.symbol + + +class Cache(object): + '''Cache interface. + + Derive from this to define concrete cache implementations. A cache is + centered around the concept of key:value pairings where the key is unique + across the cache. + + ''' + + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + + @abc.abstractmethod + def set(self, key, value): + '''Set *value* for *key*.''' + + @abc.abstractmethod + def remove(self, key): + '''Remove *key* and return stored value. + + Raise :exc:`KeyError` if *key* not found. + + ''' + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + raise NotImplementedError() # pragma: no cover + + def values(self): + '''Return values for current keys.''' + values = [] + for key in self.keys(): + try: + value = self.get(key) + except KeyError: + continue + else: + values.append(value) + + return values + + def clear(self, pattern=None): + '''Remove all keys matching *pattern*. + + *pattern* should be a regular expression string. + + If *pattern* is None then all keys will be removed. 
+ + ''' + if pattern is not None: + pattern = re.compile(pattern) + + for key in self.keys(): + if pattern is not None: + if not pattern.search(key): + continue + + try: + self.remove(key) + except KeyError: + pass + + +class ProxyCache(Cache): + '''Proxy another cache.''' + + def __init__(self, proxied): + '''Initialise cache with *proxied* cache instance.''' + self.proxied = proxied + super(ProxyCache, self).__init__() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self.proxied.get(key) + + def set(self, key, value): + '''Set *value* for *key*.''' + return self.proxied.set(key, value) + + def remove(self, key): + '''Remove *key* and return stored value. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self.proxied.remove(key) + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + return self.proxied.keys() + + +class LayeredCache(Cache): + '''Layered cache.''' + + def __init__(self, caches): + '''Initialise cache with *caches*.''' + super(LayeredCache, self).__init__() + self.caches = caches + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + Attempt to retrieve from cache layers in turn, starting with shallowest. + If value retrieved, then also set the value in each higher level cache + up from where retrieved. + + ''' + target_caches = [] + value = ftrack_api.symbol.NOT_SET + + for cache in self.caches: + try: + value = cache.get(key) + except KeyError: + target_caches.append(cache) + continue + else: + break + + if value is ftrack_api.symbol.NOT_SET: + raise KeyError(key) + + # Set value on all higher level caches. 
+ for cache in target_caches: + cache.set(key, value) + + return value + + def set(self, key, value): + '''Set *value* for *key*.''' + for cache in self.caches: + cache.set(key, value) + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found in any layer. + + ''' + removed = False + for cache in self.caches: + try: + cache.remove(key) + except KeyError: + pass + else: + removed = True + + if not removed: + raise KeyError(key) + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + keys = [] + for cache in self.caches: + keys.extend(cache.keys()) + + return list(set(keys)) + + +class MemoryCache(Cache): + '''Memory based cache.''' + + def __init__(self): + '''Initialise cache.''' + self._cache = {} + super(MemoryCache, self).__init__() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self._cache[key] + + def set(self, key, value): + '''Set *value* for *key*.''' + self._cache[key] = value + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + del self._cache[key] + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + return self._cache.keys() + + +class FileCache(Cache): + '''File based cache that uses :mod:`anydbm` module. + + .. note:: + + No locking of the underlying file is performed. + + ''' + + def __init__(self, path): + '''Initialise cache at *path*.''' + self.path = path + + # Initialise cache. 
+ cache = anydbm.open(self.path, 'c') + cache.close() + + super(FileCache, self).__init__() + + @contextlib.contextmanager + def _database(self): + '''Yield opened database file.''' + cache = anydbm.open(self.path, 'w') + try: + yield cache + finally: + cache.close() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + with self._database() as cache: + return cache[key] + + def set(self, key, value): + '''Set *value* for *key*.''' + with self._database() as cache: + cache[key] = value + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + with self._database() as cache: + del cache[key] + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + with self._database() as cache: + return cache.keys() + + +class SerialisedCache(ProxyCache): + '''Proxied cache that stores values as serialised data.''' + + def __init__(self, proxied, encode=None, decode=None): + '''Initialise cache with *encode* and *decode* callables. + + *proxied* is the underlying cache to use for storage. + + ''' + self.encode = encode + self.decode = decode + super(SerialisedCache, self).__init__(proxied) + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. 
+ + ''' + value = super(SerialisedCache, self).get(key) + if self.decode: + value = self.decode(value) + + return value + + def set(self, key, value): + '''Set *value* for *key*.''' + if self.encode: + value = self.encode(value) + + super(SerialisedCache, self).set(key, value) + + +class KeyMaker(object): + '''Generate unique keys.''' + + __metaclass__ = abc.ABCMeta + + def __init__(self): + '''Initialise key maker.''' + super(KeyMaker, self).__init__() + self.item_separator = '' + + def key(self, *items): + '''Return key for *items*.''' + keys = [] + for item in items: + keys.append(self._key(item)) + + return self.item_separator.join(keys) + + @abc.abstractmethod + def _key(self, obj): + '''Return key for *obj*.''' + + +class StringKeyMaker(KeyMaker): + '''Generate string key.''' + + def _key(self, obj): + '''Return key for *obj*.''' + return str(obj) + + +class ObjectKeyMaker(KeyMaker): + '''Generate unique keys for objects.''' + + def __init__(self): + '''Initialise key maker.''' + super(ObjectKeyMaker, self).__init__() + self.item_separator = '\0' + self.mapping_identifier = '\1' + self.mapping_pair_separator = '\2' + self.iterable_identifier = '\3' + self.name_identifier = '\4' + + def _key(self, item): + '''Return key for *item*. + + Returned key will be a pickle like string representing the *item*. This + allows for typically non-hashable objects to be used in key generation + (such as dictionaries). + + If *item* is iterable then each item in it shall also be passed to this + method to ensure correct key generation. + + Special markers are used to distinguish handling of specific cases in + order to ensure uniqueness of key corresponds directly to *item*. + + Example:: + + >>> key_maker = ObjectKeyMaker() + >>> def add(x, y): + ... "Return sum of *x* and *y*." + ... return x + y + ... 
+ >>> key_maker.key(add, (1, 2)) + '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' + >>> key_maker.key(add, (1, 3)) + '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' + + ''' + # TODO: Consider using a more robust and comprehensive solution such as + # dill (https://github.com/uqfoundation/dill). + if isinstance(item, collections.Iterable): + if isinstance(item, basestring): + return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) + + if isinstance(item, collections.Mapping): + contents = self.item_separator.join([ + ( + self._key(key) + + self.mapping_pair_separator + + self._key(value) + ) + for key, value in sorted(item.items()) + ]) + return ( + self.mapping_identifier + + contents + + self.mapping_identifier + ) + + else: + contents = self.item_separator.join([ + self._key(item) for item in item + ]) + return ( + self.iterable_identifier + + contents + + self.iterable_identifier + ) + + elif inspect.ismethod(item): + return ''.join(( + self.name_identifier, + item.__name__, + self.item_separator, + item.im_class.__name__, + self.item_separator, + item.__module__ + )) + + elif inspect.isfunction(item) or inspect.isclass(item): + return ''.join(( + self.name_identifier, + item.__name__, + self.item_separator, + item.__module__ + )) + + elif inspect.isbuiltin(item): + return self.name_identifier + item.__name__ + + else: + return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) + + +class Memoiser(object): + '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. + + Example:: + + >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) + >>> def add(x, y): + ... "Return sum of *x* and *y*." + ... print 'Called' + ... return x + y + ... + >>> memoiser.call(add, (1, 2), {}) + Called + >>> memoiser.call(add, (1, 2), {}) + >>> memoiser.call(add, (1, 3), {}) + Called + + ''' + + def __init__(self, cache=None, key_maker=None, return_copies=True): + '''Initialise with *cache* and *key_maker* to use. 
+ + If *cache* is not specified a default :class:`MemoryCache` will be + used. Similarly, if *key_maker* is not specified a default + :class:`ObjectKeyMaker` will be used. + + If *return_copies* is True then all results returned from the cache will + be deep copies to avoid indirect mutation of cached values. + + ''' + self.cache = cache + if self.cache is None: + self.cache = MemoryCache() + + self.key_maker = key_maker + if self.key_maker is None: + self.key_maker = ObjectKeyMaker() + + self.return_copies = return_copies + super(Memoiser, self).__init__() + + def call(self, function, args=None, kw=None): + '''Call *function* with *args* and *kw* and return result. + + If *function* was previously called with exactly the same arguments + then return cached result if available. + + Store result for call in cache. + + ''' + if args is None: + args = () + + if kw is None: + kw = {} + + # Support arguments being passed as positionals or keywords. + arguments = inspect.getcallargs(function, *args, **kw) + + key = self.key_maker.key(function, arguments) + try: + value = self.cache.get(key) + + except KeyError: + value = function(*args, **kw) + self.cache.set(key, value) + + # If requested, deep copy value to return in order to avoid cached value + # being inadvertently altered by the caller. + if self.return_copies: + value = copy.deepcopy(value) + + return value + + +def memoise_decorator(memoiser): + '''Decorator to memoise function calls using *memoiser*.''' + def outer(function): + + @functools.wraps(function) + def inner(*args, **kw): + return memoiser.call(function, args, kw) + + return inner + + return outer + + +#: Default memoiser. +memoiser = Memoiser() + +#: Default memoise decorator using standard cache and key maker. 
+memoise = memoise_decorator(memoiser) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py new file mode 100644 index 00000000000..91655a7b022 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py @@ -0,0 +1,507 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging + +import collections +import copy + +import ftrack_api.exception +import ftrack_api.inspection +import ftrack_api.symbol +import ftrack_api.operation +import ftrack_api.cache +from ftrack_api.logging import LazyLogMessage as L + + +class Collection(collections.MutableSequence): + '''A collection of entities.''' + + def __init__(self, entity, attribute, mutable=True, data=None): + '''Initialise collection.''' + self.entity = entity + self.attribute = attribute + self._data = [] + self._identities = set() + + # Set initial dataset. + # Note: For initialisation, immutability is deferred till after initial + # population as otherwise there would be no public way to initialise an + # immutable collection. The reason self._data is not just set directly + # is to ensure other logic can be applied without special handling. + self.mutable = True + try: + if data is None: + data = [] + + with self.entity.session.operation_recording(False): + self.extend(data) + finally: + self.mutable = mutable + + def _identity_key(self, entity): + '''Return identity key for *entity*.''' + return str(ftrack_api.inspection.identity(entity)) + + def __copy__(self): + '''Return shallow copy. + + .. note:: + + To maintain expectations on usage, the shallow copy will include a + shallow copy of the underlying data store. 
+ + ''' + cls = self.__class__ + copied_instance = cls.__new__(cls) + copied_instance.__dict__.update(self.__dict__) + copied_instance._data = copy.copy(self._data) + copied_instance._identities = copy.copy(self._identities) + + return copied_instance + + def _notify(self, old_value): + '''Notify about modification.''' + # Record operation. + if self.entity.session.record_operations: + self.entity.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + self.entity.entity_type, + ftrack_api.inspection.primary_key(self.entity), + self.attribute.name, + old_value, + self + ) + ) + + def insert(self, index, item): + '''Insert *item* at *index*.''' + if not self.mutable: + raise ftrack_api.exception.ImmutableCollectionError(self) + + if item in self: + raise ftrack_api.exception.DuplicateItemInCollectionError( + item, self + ) + + old_value = copy.copy(self) + self._data.insert(index, item) + self._identities.add(self._identity_key(item)) + self._notify(old_value) + + def __contains__(self, value): + '''Return whether *value* present in collection.''' + return self._identity_key(value) in self._identities + + def __getitem__(self, index): + '''Return item at *index*.''' + return self._data[index] + + def __setitem__(self, index, item): + '''Set *item* against *index*.''' + if not self.mutable: + raise ftrack_api.exception.ImmutableCollectionError(self) + + try: + existing_index = self.index(item) + except ValueError: + pass + else: + if index != existing_index: + raise ftrack_api.exception.DuplicateItemInCollectionError( + item, self + ) + + old_value = copy.copy(self) + try: + existing_item = self._data[index] + except IndexError: + pass + else: + self._identities.remove(self._identity_key(existing_item)) + + self._data[index] = item + self._identities.add(self._identity_key(item)) + self._notify(old_value) + + def __delitem__(self, index): + '''Remove item at *index*.''' + if not self.mutable: + raise 
ftrack_api.exception.ImmutableCollectionError(self) + + old_value = copy.copy(self) + item = self._data[index] + del self._data[index] + self._identities.remove(self._identity_key(item)) + self._notify(old_value) + + def __len__(self): + '''Return count of items.''' + return len(self._data) + + def __eq__(self, other): + '''Return whether this collection is equal to *other*.''' + if not isinstance(other, Collection): + return False + + return sorted(self._identities) == sorted(other._identities) + + def __ne__(self, other): + '''Return whether this collection is not equal to *other*.''' + return not self == other + + +class MappedCollectionProxy(collections.MutableMapping): + '''Common base class for mapped collection of entities.''' + + def __init__(self, collection): + '''Initialise proxy for *collection*.''' + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self.collection = collection + super(MappedCollectionProxy, self).__init__() + + def __copy__(self): + '''Return shallow copy. + + .. note:: + + To maintain expectations on usage, the shallow copy will include a + shallow copy of the underlying collection. + + ''' + cls = self.__class__ + copied_instance = cls.__new__(cls) + copied_instance.__dict__.update(self.__dict__) + copied_instance.collection = copy.copy(self.collection) + + return copied_instance + + @property + def mutable(self): + '''Return whether collection is mutable.''' + return self.collection.mutable + + @mutable.setter + def mutable(self, value): + '''Set whether collection is mutable to *value*.''' + self.collection.mutable = value + + @property + def attribute(self): + '''Return attribute bound to.''' + return self.collection.attribute + + @attribute.setter + def attribute(self, value): + '''Set bound attribute to *value*.''' + self.collection.attribute = value + + +class KeyValueMappedCollectionProxy(MappedCollectionProxy): + '''A mapped collection of key, value entities. 
+ + Proxy a standard :class:`Collection` as a mapping where certain attributes + from the entities in the collection are mapped to key, value pairs. + + For example:: + + >>> collection = [Metadata(key='foo', value='bar'), ...] + >>> mapped = KeyValueMappedCollectionProxy( + ... collection, create_metadata, + ... key_attribute='key', value_attribute='value' + ... ) + >>> print mapped['foo'] + 'bar' + >>> mapped['bam'] = 'biz' + >>> print mapped.collection[-1] + Metadata(key='bam', value='biz') + + ''' + + def __init__( + self, collection, creator, key_attribute, value_attribute + ): + '''Initialise collection.''' + self.creator = creator + self.key_attribute = key_attribute + self.value_attribute = value_attribute + super(KeyValueMappedCollectionProxy, self).__init__(collection) + + def _get_entity_by_key(self, key): + '''Return entity instance with matching *key* from collection.''' + for entity in self.collection: + if entity[self.key_attribute] == key: + return entity + + raise KeyError(key) + + def __getitem__(self, key): + '''Return value for *key*.''' + entity = self._get_entity_by_key(key) + return entity[self.value_attribute] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + try: + entity = self._get_entity_by_key(key) + except KeyError: + data = { + self.key_attribute: key, + self.value_attribute: value + } + entity = self.creator(self, data) + + if ( + ftrack_api.inspection.state(entity) is + ftrack_api.symbol.CREATED + ): + # Persisting this entity will be handled here, record the + # operation. + self.collection.append(entity) + + else: + # The entity is created and persisted separately by the + # creator. Do not record this operation. + with self.collection.entity.session.operation_recording(False): + # Do not record this operation since it will trigger + # redudant and potentially failing operations. 
+ self.collection.append(entity) + + else: + entity[self.value_attribute] = value + + def __delitem__(self, key): + '''Remove and delete *key*. + + .. note:: + + The associated entity will be deleted as well. + + ''' + for index, entity in enumerate(self.collection): + if entity[self.key_attribute] == key: + break + else: + raise KeyError(key) + + del self.collection[index] + entity.session.delete(entity) + + def __iter__(self): + '''Iterate over all keys.''' + keys = set() + for entity in self.collection: + keys.add(entity[self.key_attribute]) + + return iter(keys) + + def __len__(self): + '''Return count of keys.''' + keys = set() + for entity in self.collection: + keys.add(entity[self.key_attribute]) + + return len(keys) + + +class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): + '''Generate key for session.''' + + def _key(self, obj): + '''Return key for *obj*.''' + if isinstance(obj, dict): + session = obj.get('session') + if session is not None: + # Key by session only. + return str(id(session)) + + return str(obj) + + +#: Memoiser for use with callables that should be called once per session. +memoise_session = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + + +@memoise_session +def _get_custom_attribute_configurations(session): + '''Return list of custom attribute configurations. + + The configuration objects will have key, project_id, id and object_type_id + populated. 
+ + ''' + return session.query( + 'select key, project_id, id, object_type_id, entity_type from ' + 'CustomAttributeConfiguration' + ).all() + + +class CustomAttributeCollectionProxy(MappedCollectionProxy): + '''A mapped collection of custom attribute value entities.''' + + def __init__( + self, collection + ): + '''Initialise collection.''' + self.key_attribute = 'configuration_id' + self.value_attribute = 'value' + super(CustomAttributeCollectionProxy, self).__init__(collection) + + def _get_entity_configurations(self): + '''Return all configurations for current collection entity.''' + entity = self.collection.entity + entity_type = None + project_id = None + object_type_id = None + + if 'object_type_id' in entity.keys(): + project_id = entity['project_id'] + entity_type = 'task' + object_type_id = entity['object_type_id'] + + if entity.entity_type == 'AssetVersion': + project_id = entity['asset']['parent']['project_id'] + entity_type = 'assetversion' + + if entity.entity_type == 'Asset': + project_id = entity['parent']['project_id'] + entity_type = 'asset' + + if entity.entity_type == 'Project': + project_id = entity['id'] + entity_type = 'show' + + if entity.entity_type == 'User': + entity_type = 'user' + + if entity_type is None: + raise ValueError( + 'Entity {!r} not supported.'.format(entity) + ) + + configurations = [] + for configuration in _get_custom_attribute_configurations( + entity.session + ): + if ( + configuration['entity_type'] == entity_type and + configuration['project_id'] in (project_id, None) and + configuration['object_type_id'] == object_type_id + ): + configurations.append(configuration) + + # Return with global configurations at the end of the list. This is done + # so that global conigurations are shadowed by project specific if the + # configurations list is looped when looking for a matching `key`. 
+ return sorted( + configurations, key=lambda item: item['project_id'] is None + ) + + def _get_keys(self): + '''Return a list of all keys.''' + keys = [] + for configuration in self._get_entity_configurations(): + keys.append(configuration['key']) + + return keys + + def _get_entity_by_key(self, key): + '''Return entity instance with matching *key* from collection.''' + configuration_id = self.get_configuration_id_from_key(key) + for entity in self.collection: + if entity[self.key_attribute] == configuration_id: + return entity + + return None + + def get_configuration_id_from_key(self, key): + '''Return id of configuration with matching *key*. + + Raise :exc:`KeyError` if no configuration with matching *key* found. + + ''' + for configuration in self._get_entity_configurations(): + if key == configuration['key']: + return configuration['id'] + + raise KeyError(key) + + def __getitem__(self, key): + '''Return value for *key*.''' + entity = self._get_entity_by_key(key) + + if entity: + return entity[self.value_attribute] + + for configuration in self._get_entity_configurations(): + if configuration['key'] == key: + return configuration['default'] + + raise KeyError(key) + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + custom_attribute_value = self._get_entity_by_key(key) + + if custom_attribute_value: + custom_attribute_value[self.value_attribute] = value + else: + entity = self.collection.entity + session = entity.session + data = { + self.key_attribute: self.get_configuration_id_from_key(key), + self.value_attribute: value, + 'entity_id': entity['id'] + } + + # Make sure to use the currently active collection. This is + # necessary since a merge might have replaced the current one. + self.collection.entity['custom_attributes'].collection.append( + session.create('CustomAttributeValue', data) + ) + + def __delitem__(self, key): + '''Remove and delete *key*. + + .. note:: + + The associated entity will be deleted as well. 
+ + ''' + custom_attribute_value = self._get_entity_by_key(key) + + if custom_attribute_value: + index = self.collection.index(custom_attribute_value) + del self.collection[index] + + custom_attribute_value.session.delete(custom_attribute_value) + else: + self.logger.warning(L( + 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', + key, self.collection.entity + )) + + def __eq__(self, collection): + '''Return True if *collection* equals proxy collection.''' + if collection is ftrack_api.symbol.NOT_SET: + return False + + return collection.collection == self.collection + + def __iter__(self): + '''Iterate over all keys.''' + keys = self._get_keys() + return iter(keys) + + def __len__(self): + '''Return count of keys.''' + keys = self._get_keys() + return len(keys) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py new file mode 100644 index 00000000000..1802e380c05 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py @@ -0,0 +1,119 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import os +from abc import ABCMeta, abstractmethod +import tempfile + + +class Data(object): + '''File-like object for manipulating data.''' + + __metaclass__ = ABCMeta + + def __init__(self): + '''Initialise data access.''' + self.closed = False + + @abstractmethod + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + + @abstractmethod + def write(self, content): + '''Write content at current position.''' + + def flush(self): + '''Flush buffers ensuring data written.''' + + def seek(self, offset, whence=os.SEEK_SET): + '''Move internal pointer by *offset*. 
+ + The *whence* argument is optional and defaults to os.SEEK_SET or 0 + (absolute file positioning); other values are os.SEEK_CUR or 1 + (seek relative to the current position) and os.SEEK_END or 2 + (seek relative to the file's end). + + ''' + raise NotImplementedError('Seek not supported.') + + def tell(self): + '''Return current position of internal pointer.''' + raise NotImplementedError('Tell not supported.') + + def close(self): + '''Flush buffers and prevent further access.''' + self.flush() + self.closed = True + + +class FileWrapper(Data): + '''Data wrapper for Python file objects.''' + + def __init__(self, wrapped_file): + '''Initialise access to *wrapped_file*.''' + self.wrapped_file = wrapped_file + self._read_since_last_write = False + super(FileWrapper, self).__init__() + + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + self._read_since_last_write = True + + if limit is None: + limit = -1 + + return self.wrapped_file.read(limit) + + def write(self, content): + '''Write content at current position.''' + if self._read_since_last_write: + # Windows requires a seek before switching from read to write. 
+ self.seek(self.tell()) + + self.wrapped_file.write(content) + self._read_since_last_write = False + + def flush(self): + '''Flush buffers ensuring data written.''' + super(FileWrapper, self).flush() + if hasattr(self.wrapped_file, 'flush'): + self.wrapped_file.flush() + + def seek(self, offset, whence=os.SEEK_SET): + '''Move internal pointer by *offset*.''' + self.wrapped_file.seek(offset, whence) + + def tell(self): + '''Return current position of internal pointer.''' + return self.wrapped_file.tell() + + def close(self): + '''Flush buffers and prevent further access.''' + if not self.closed: + super(FileWrapper, self).close() + if hasattr(self.wrapped_file, 'close'): + self.wrapped_file.close() + + +class File(FileWrapper): + '''Data wrapper accepting filepath.''' + + def __init__(self, path, mode='rb'): + '''Open file at *path* with *mode*.''' + file_object = open(path, mode) + super(File, self).__init__(file_object) + + +class String(FileWrapper): + '''Data wrapper using TemporaryFile instance.''' + + def __init__(self, content=None): + '''Initialise data with *content*.''' + super(String, self).__init__( + tempfile.TemporaryFile() + ) + + if content is not None: + self.wrapped_file.write(content) + self.wrapped_file.seek(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py new file mode 100644 index 00000000000..1d452f2828f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py new file mode 100644 index 00000000000..859d94e4360 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py @@ -0,0 +1,91 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class AssetVersion(ftrack_api.entity.base.Entity): + '''Represent asset version.''' + + def create_component( + self, path, data=None, location=None + ): + '''Create a new component from *path* with additional *data* + + .. note:: + + This is a helper method. To create components manually use the + standard :meth:`Session.create` method. + + *path* can be a string representing a filesystem path to the data to + use for the component. The *path* can also be specified as a sequence + string, in which case a sequence component with child components for + each item in the sequence will be created automatically. The accepted + format for a sequence is '{head}{padding}{tail} [{ranges}]'. For + example:: + + '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' + + .. seealso:: + + `Clique documentation `_ + + *data* should be a dictionary of any additional data to construct the + component with (as passed to :meth:`Session.create`). This version is + automatically set as the component's version. + + If *location* is specified then automatically add component to that + location. + + ''' + if data is None: + data = {} + + data.pop('version_id', None) + data['version'] = self + + return self.session.create_component(path, data=data, location=location) + + def encode_media(self, media, keep_original='auto'): + '''Return a new Job that encode *media* to make it playable in browsers. + + *media* can be a path to a file or a FileComponent in the ftrack.server + location. 
+ + The job will encode *media* based on the file type and job data contains + information about encoding in the following format:: + + { + 'output': [{ + 'format': 'video/mp4', + 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' + }, { + 'format': 'image/jpeg', + 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' + }], + 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', + 'keep_original': True + } + + The output components are associated with the job via the job_components + relation. + + An image component will always be generated if possible, and will be + set as the version's thumbnail. + + The new components will automatically be associated with the version. + A server version of 3.3.32 or higher is required for this to function + properly. + + If *media* is a file path, a new source component will be created and + added to the ftrack server location and a call to :meth:`commit` will be + issued. If *media* is a FileComponent, it will be assumed to be in + available in the ftrack.server location. + + If *keep_original* is not set, the original media will be kept if it + is a FileComponent, and deleted if it is a file path. You can specify + True or False to change this behavior. 
+ ''' + return self.session.encode_media( + media, version_id=self['id'], keep_original=keep_original + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py new file mode 100644 index 00000000000..f5a1a3cec35 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py @@ -0,0 +1,402 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import abc +import collections +import logging + +import ftrack_api.symbol +import ftrack_api.attribute +import ftrack_api.inspection +import ftrack_api.exception +import ftrack_api.operation +from ftrack_api.logging import LazyLogMessage as L + + +class DynamicEntityTypeMetaclass(abc.ABCMeta): + '''Custom metaclass to customise representation of dynamic classes. + + .. note:: + + Derive from same metaclass as derived bases to avoid conflicts. + + ''' + def __repr__(self): + '''Return representation of class.''' + return ''.format(self.__name__) + + +class Entity(collections.MutableMapping): + '''Base class for all entities.''' + + __metaclass__ = DynamicEntityTypeMetaclass + + entity_type = 'Entity' + attributes = None + primary_key_attributes = None + default_projections = None + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. 
+ + ''' + super(Entity, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self.session = session + self._inflated = set() + + if data is None: + data = {} + + self.logger.debug(L( + '{0} entity from {1!r}.', + ('Reconstructing' if reconstructing else 'Constructing'), data + )) + + self._ignore_data_keys = ['__entity_type__'] + if not reconstructing: + self._construct(data) + else: + self._reconstruct(data) + + def _construct(self, data): + '''Construct from *data*.''' + # Suspend operation recording so that all modifications can be applied + # in single create operation. In addition, recording a modification + # operation requires a primary key which may not be available yet. + + relational_attributes = dict() + + with self.session.operation_recording(False): + # Set defaults for any unset local attributes. + for attribute in self.__class__.attributes: + if attribute.name not in data: + default_value = attribute.default_value + if callable(default_value): + default_value = default_value(self) + + attribute.set_local_value(self, default_value) + + + # Data represents locally set values. + for key, value in data.items(): + if key in self._ignore_data_keys: + continue + + attribute = self.__class__.attributes.get(key) + if attribute is None: + self.logger.debug(L( + 'Cannot populate {0!r} attribute as no such ' + 'attribute found on entity {1!r}.', key, self + )) + continue + + if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + relational_attributes.setdefault( + attribute, value + ) + + else: + attribute.set_local_value(self, value) + + # Record create operation. + # Note: As this operation is recorded *before* any Session.merge takes + # place there is the possibility that the operation will hold references + # to outdated data in entity_data. However, this would be unusual in + # that it would mean the same new entity was created twice and only one + # altered. 
Conversely, if this operation were recorded *after* + # Session.merge took place, any cache would not be able to determine + # the status of the entity, which could be important if the cache should + # not store newly created entities that have not yet been persisted. Out + # of these two 'evils' this approach is deemed the lesser at this time. + # A third, more involved, approach to satisfy both might be to record + # the operation with a PENDING entity_data value and then update with + # merged values post merge. + if self.session.record_operations: + entity_data = {} + + # Lower level API used here to avoid including any empty + # collections that are automatically generated on access. + for attribute in self.attributes: + value = attribute.get_local_value(self) + if value is not ftrack_api.symbol.NOT_SET: + entity_data[attribute.name] = value + + self.session.recorded_operations.push( + ftrack_api.operation.CreateEntityOperation( + self.entity_type, + ftrack_api.inspection.primary_key(self), + entity_data + ) + ) + + for attribute, value in relational_attributes.items(): + # Finally we set values for "relational" attributes, we need + # to do this at the end in order to get the create operations + # in the correct order as the newly created attributes might + # contain references to the newly created entity. + + attribute.set_local_value( + self, value + ) + + def _reconstruct(self, data): + '''Reconstruct from *data*.''' + # Data represents remote values. 
+ for key, value in data.items(): + if key in self._ignore_data_keys: + continue + + attribute = self.__class__.attributes.get(key) + if attribute is None: + self.logger.debug(L( + 'Cannot populate {0!r} attribute as no such attribute ' + 'found on entity {1!r}.', key, self + )) + continue + + attribute.set_remote_value(self, value) + + def __repr__(self): + '''Return representation of instance.''' + return ''.format( + self.__class__.__name__, id(self) + ) + + def __str__(self): + '''Return string representation of instance.''' + with self.session.auto_populating(False): + primary_key = ['Unknown'] + try: + primary_key = ftrack_api.inspection.primary_key(self).values() + except KeyError: + pass + + return '<{0}({1})>'.format( + self.__class__.__name__, ', '.join(primary_key) + ) + + def __hash__(self): + '''Return hash representing instance.''' + return hash(str(ftrack_api.inspection.identity(self))) + + def __eq__(self, other): + '''Return whether *other* is equal to this instance. + + .. note:: + + Equality is determined by both instances having the same identity. + Values of attributes are not considered. + + ''' + try: + return ( + ftrack_api.inspection.identity(other) + == ftrack_api.inspection.identity(self) + ) + except (AttributeError, KeyError): + return False + + def __getitem__(self, key): + '''Return attribute value for *key*.''' + attribute = self.__class__.attributes.get(key) + if attribute is None: + raise KeyError(key) + + return attribute.get_value(self) + + def __setitem__(self, key, value): + '''Set attribute *value* for *key*.''' + attribute = self.__class__.attributes.get(key) + if attribute is None: + raise KeyError(key) + + attribute.set_local_value(self, value) + + def __delitem__(self, key): + '''Clear attribute value for *key*. + + .. note:: + + Will not remove the attribute, but instead clear any local value + and revert to the last known server value. 
+ + ''' + attribute = self.__class__.attributes.get(key) + attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) + + def __iter__(self): + '''Iterate over all attributes keys.''' + for attribute in self.__class__.attributes: + yield attribute.name + + def __len__(self): + '''Return count of attributes.''' + return len(self.__class__.attributes) + + def values(self): + '''Return list of values.''' + if self.session.auto_populate: + self._populate_unset_scalar_attributes() + + return super(Entity, self).values() + + def items(self): + '''Return list of tuples of (key, value) pairs. + + .. note:: + + Will fetch all values from the server if not already fetched or set + locally. + + ''' + if self.session.auto_populate: + self._populate_unset_scalar_attributes() + + return super(Entity, self).items() + + def clear(self): + '''Reset all locally modified attribute values.''' + for attribute in self: + del self[attribute] + + def merge(self, entity, merged=None): + '''Merge *entity* attribute values and other data into this entity. + + Only merge values from *entity* that are not + :attr:`ftrack_api.symbol.NOT_SET`. + + Return a list of changes made with each change being a mapping with + the keys: + + * type - Either 'remote_attribute', 'local_attribute' or 'property'. + * name - The name of the attribute / property modified. + * old_value - The previous value. + * new_value - The new merged value. + + ''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' + changes = [] + + # Attributes. + + # Prioritise by type so that scalar values are set first. This should + # guarantee that the attributes making up the identity of the entity + # are merged before merging any collections that may have references to + # this entity. 
+ attributes = collections.deque() + for attribute in entity.attributes: + if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + attributes.appendleft(attribute) + else: + attributes.append(attribute) + + for other_attribute in attributes: + attribute = self.attributes.get(other_attribute.name) + + # Local attributes. + other_local_value = other_attribute.get_local_value(entity) + if other_local_value is not ftrack_api.symbol.NOT_SET: + local_value = attribute.get_local_value(self) + if local_value != other_local_value: + merged_local_value = self.session.merge( + other_local_value, merged=merged + ) + + attribute.set_local_value(self, merged_local_value) + changes.append({ + 'type': 'local_attribute', + 'name': attribute.name, + 'old_value': local_value, + 'new_value': merged_local_value + }) + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + # Remote attributes. + other_remote_value = other_attribute.get_remote_value(entity) + if other_remote_value is not ftrack_api.symbol.NOT_SET: + remote_value = attribute.get_remote_value(self) + if remote_value != other_remote_value: + merged_remote_value = self.session.merge( + other_remote_value, merged=merged + ) + + attribute.set_remote_value( + self, merged_remote_value + ) + + changes.append({ + 'type': 'remote_attribute', + 'name': attribute.name, + 'old_value': remote_value, + 'new_value': merged_remote_value + }) + + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + # We need to handle collections separately since + # they may store a local copy of the remote attribute + # even though it may not be modified. + if not isinstance( + attribute, ftrack_api.attribute.AbstractCollectionAttribute + ): + continue + + local_value = attribute.get_local_value( + self + ) + + # Populated but not modified, update it. 
+ if ( + local_value is not ftrack_api.symbol.NOT_SET and + local_value == remote_value + ): + attribute.set_local_value( + self, merged_remote_value + ) + changes.append({ + 'type': 'local_attribute', + 'name': attribute.name, + 'old_value': local_value, + 'new_value': merged_remote_value + }) + + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + return changes + + def _populate_unset_scalar_attributes(self): + '''Populate all unset scalar attributes in one query.''' + projections = [] + for attribute in self.attributes: + if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET: + projections.append(attribute.name) + + if projections: + self.session.populate([self], ', '.join(projections)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py new file mode 100644 index 00000000000..9d59c4c051d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py @@ -0,0 +1,74 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class Component(ftrack_api.entity.base.Entity): + '''Represent a component.''' + + def get_availability(self, locations=None): + '''Return availability in *locations*. + + If *locations* is None, all known locations will be checked. + + Return a dictionary of {location_id:percentage_availability} + + ''' + return self.session.get_component_availability( + self, locations=locations + ) + + +class CreateThumbnailMixin(object): + '''Mixin to add create_thumbnail method on entity class.''' + + def create_thumbnail(self, path, data=None): + '''Set entity thumbnail from *path*. 
+ + Creates a thumbnail component using in the ftrack.server location + :meth:`Session.create_component + ` The thumbnail component + will be created using *data* if specified. If no component name is + given, `thumbnail` will be used. + + The file is expected to be of an appropriate size and valid file + type. + + .. note:: + + A :meth:`Session.commit` will be + automatically issued. + + ''' + if data is None: + data = {} + if not data.get('name'): + data['name'] = 'thumbnail' + + thumbnail_component = self.session.create_component( + path, data, location=None + ) + + origin_location = self.session.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + server_location = self.session.get( + 'Location', ftrack_api.symbol.SERVER_LOCATION_ID + ) + server_location.add_component(thumbnail_component, [origin_location]) + + # TODO: This commit can be avoided by reordering the operations in + # this method so that the component is transferred to ftrack.server + # after the thumbnail has been set. + # + # There is currently a bug in the API backend, causing the operations + # to *some* times be ordered wrongly, where the update occurs before + # the component has been created, causing an integrity error. + # + # Once this issue has been resolved, this commit can be removed and + # and the update placed between component creation and registration. 
+ self['thumbnail_id'] = thumbnail_component['id'] + self.session.commit() + + return thumbnail_component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py new file mode 100644 index 00000000000..e925b70f5a6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py @@ -0,0 +1,435 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging +import uuid +import functools + +import ftrack_api.attribute +import ftrack_api.entity.base +import ftrack_api.entity.location +import ftrack_api.entity.component +import ftrack_api.entity.asset_version +import ftrack_api.entity.project_schema +import ftrack_api.entity.note +import ftrack_api.entity.job +import ftrack_api.entity.user +import ftrack_api.symbol +import ftrack_api.cache +from ftrack_api.logging import LazyLogMessage as L + + +class Factory(object): + '''Entity class factory.''' + + def __init__(self): + '''Initialise factory.''' + super(Factory, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*. + + *bases* should be a list of bases to give the constructed class. If not + specified, default to :class:`ftrack_api.entity.base.Entity`. + + ''' + entity_type = schema['id'] + class_name = entity_type + + class_bases = bases + if class_bases is None: + class_bases = [ftrack_api.entity.base.Entity] + + class_namespace = dict() + + # Build attributes for class. 
+ attributes = ftrack_api.attribute.Attributes() + immutable_properties = schema.get('immutable', []) + computed_properties = schema.get('computed', []) + for name, fragment in schema.get('properties', {}).items(): + mutable = name not in immutable_properties + computed = name in computed_properties + + default = fragment.get('default', ftrack_api.symbol.NOT_SET) + if default == '{uid}': + default = lambda instance: str(uuid.uuid4()) + + data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) + + if data_type is not ftrack_api.symbol.NOT_SET: + + if data_type in ( + 'string', 'boolean', 'integer', 'number', 'variable', + 'object' + ): + # Basic scalar attribute. + if data_type == 'number': + data_type = 'float' + + if data_type == 'string': + data_format = fragment.get('format') + if data_format == 'date-time': + data_type = 'datetime' + + attribute = self.create_scalar_attribute( + class_name, name, mutable, computed, default, data_type + ) + if attribute: + attributes.add(attribute) + + elif data_type == 'array': + attribute = self.create_collection_attribute( + class_name, name, mutable + ) + if attribute: + attributes.add(attribute) + + elif data_type == 'mapped_array': + reference = fragment.get('items', {}).get('$ref') + if not reference: + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that does ' + 'not define a schema reference.', class_name, name + )) + continue + + attribute = self.create_mapped_collection_attribute( + class_name, name, mutable, reference + ) + if attribute: + attributes.add(attribute) + + else: + self.logger.debug(L( + 'Skipping {0}.{1} attribute with unrecognised data ' + 'type {2}', class_name, name, data_type + )) + else: + # Reference attribute. 
+ reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) + if reference is ftrack_api.symbol.NOT_SET: + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that does ' + 'not define a schema reference.', class_name, name + )) + continue + + attribute = self.create_reference_attribute( + class_name, name, mutable, reference + ) + if attribute: + attributes.add(attribute) + + default_projections = schema.get('default_projections', []) + + # Construct class. + class_namespace['entity_type'] = entity_type + class_namespace['attributes'] = attributes + class_namespace['primary_key_attributes'] = schema['primary_key'][:] + class_namespace['default_projections'] = default_projections + + cls = type( + str(class_name), # type doesn't accept unicode. + tuple(class_bases), + class_namespace + ) + + return cls + + def create_scalar_attribute( + self, class_name, name, mutable, computed, default, data_type + ): + '''Return appropriate scalar attribute instance.''' + return ftrack_api.attribute.ScalarAttribute( + name, data_type=data_type, default_value=default, mutable=mutable, + computed=computed + ) + + def create_reference_attribute(self, class_name, name, mutable, reference): + '''Return appropriate reference attribute instance.''' + return ftrack_api.attribute.ReferenceAttribute( + name, reference, mutable=mutable + ) + + def create_collection_attribute(self, class_name, name, mutable): + '''Return appropriate collection attribute instance.''' + return ftrack_api.attribute.CollectionAttribute( + name, mutable=mutable + ) + + def create_mapped_collection_attribute( + self, class_name, name, mutable, reference + ): + '''Return appropriate mapped collection attribute instance.''' + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that has ' + 'no implementation defined for reference {2}.', + class_name, name, reference + )) + + +class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): + '''Generate key for defaults.''' + + def _key(self, 
obj): + '''Return key for *obj*.''' + if isinstance(obj, dict): + entity = obj.get('entity') + if entity is not None: + # Key by session only. + return str(id(entity.session)) + + return str(obj) + + +#: Memoiser for use with default callables that should only be called once per +# session. +memoise_defaults = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + +#: Memoiser for use with callables that should be called once per session. +memoise_session = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + + +@memoise_session +def _get_custom_attribute_configurations(session): + '''Return list of custom attribute configurations. + + The configuration objects will have key, project_id, id and object_type_id + populated. + + ''' + return session.query( + 'select key, project_id, id, object_type_id, entity_type, ' + 'is_hierarchical from CustomAttributeConfiguration' + ).all() + + +def _get_entity_configurations(entity): + '''Return all configurations for current collection entity.''' + entity_type = None + project_id = None + object_type_id = None + + if 'object_type_id' in entity.keys(): + project_id = entity['project_id'] + entity_type = 'task' + object_type_id = entity['object_type_id'] + + if entity.entity_type == 'AssetVersion': + project_id = entity['asset']['parent']['project_id'] + entity_type = 'assetversion' + + if entity.entity_type == 'Project': + project_id = entity['id'] + entity_type = 'show' + + if entity.entity_type == 'User': + entity_type = 'user' + + if entity.entity_type == 'Asset': + entity_type = 'asset' + + if entity.entity_type in ('TypedContextList', 'AssetVersionList'): + entity_type = 'list' + + if entity_type is None: + raise ValueError( + 'Entity {!r} not supported.'.format(entity) + ) + + configurations = [] + for configuration in _get_custom_attribute_configurations( + 
entity.session + ): + if ( + configuration['entity_type'] == entity_type and + configuration['project_id'] in (project_id, None) and + configuration['object_type_id'] == object_type_id + ): + # The custom attribute configuration is for the target entity type. + configurations.append(configuration) + elif ( + entity_type in ('asset', 'assetversion', 'show', 'task') and + configuration['project_id'] in (project_id, None) and + configuration['is_hierarchical'] + ): + # The target entity type allows hierarchical attributes. + configurations.append(configuration) + + # Return with global configurations at the end of the list. This is done + # so that global conigurations are shadowed by project specific if the + # configurations list is looped when looking for a matching `key`. + return sorted( + configurations, key=lambda item: item['project_id'] is None + ) + + +class StandardFactory(Factory): + '''Standard entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + if not bases: + bases = [] + + extra_bases = [] + # Customise classes. + if schema['id'] == 'ProjectSchema': + extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] + + elif schema['id'] == 'Location': + extra_bases = [ftrack_api.entity.location.Location] + + elif schema['id'] == 'AssetVersion': + extra_bases = [ftrack_api.entity.asset_version.AssetVersion] + + elif schema['id'].endswith('Component'): + extra_bases = [ftrack_api.entity.component.Component] + + elif schema['id'] == 'Note': + extra_bases = [ftrack_api.entity.note.Note] + + elif schema['id'] == 'Job': + extra_bases = [ftrack_api.entity.job.Job] + + elif schema['id'] == 'User': + extra_bases = [ftrack_api.entity.user.User] + + bases = extra_bases + bases + + # If bases does not contain any items, add the base entity class. + if not bases: + bases = [ftrack_api.entity.base.Entity] + + # Add mixins. 
+ if 'notes' in schema.get('properties', {}): + bases.append( + ftrack_api.entity.note.CreateNoteMixin + ) + + if 'thumbnail_id' in schema.get('properties', {}): + bases.append( + ftrack_api.entity.component.CreateThumbnailMixin + ) + + cls = super(StandardFactory, self).create(schema, bases=bases) + + return cls + + def create_mapped_collection_attribute( + self, class_name, name, mutable, reference + ): + '''Return appropriate mapped collection attribute instance.''' + if reference == 'Metadata': + + def create_metadata(proxy, data, reference): + '''Return metadata for *data*.''' + entity = proxy.collection.entity + session = entity.session + data.update({ + 'parent_id': entity['id'], + 'parent_type': entity.entity_type + }) + return session.create(reference, data) + + creator = functools.partial( + create_metadata, reference=reference + ) + key_attribute = 'key' + value_attribute = 'value' + + return ftrack_api.attribute.KeyValueMappedCollectionAttribute( + name, creator, key_attribute, value_attribute, mutable=mutable + ) + + elif reference == 'CustomAttributeValue': + return ( + ftrack_api.attribute.CustomAttributeCollectionAttribute( + name, mutable=mutable + ) + ) + + elif reference.endswith('CustomAttributeValue'): + def creator(proxy, data): + '''Create a custom attribute based on *proxy* and *data*. + + Raise :py:exc:`KeyError` if related entity is already presisted + to the server. The proxy represents dense custom attribute + values and should never create new custom attribute values + through the proxy if entity exists on the remote. + + If the entity is not persisted the ususal + CustomAttributeValue items cannot be updated as + the related entity does not exist on remote and values not in + the proxy. Instead a CustomAttributeValue will + be reconstructed and an update operation will be recorded. 
+ + ''' + entity = proxy.collection.entity + if ( + ftrack_api.inspection.state(entity) is not + ftrack_api.symbol.CREATED + ): + raise KeyError( + 'Custom attributes must be created explicitly for the ' + 'given entity type before being set.' + ) + + configuration = None + for candidate in _get_entity_configurations(entity): + if candidate['key'] == data['key']: + configuration = candidate + break + + if configuration is None: + raise ValueError( + u'No valid custom attribute for data {0!r} was found.' + .format(data) + ) + + create_data = dict(data.items()) + create_data['configuration_id'] = configuration['id'] + create_data['entity_id'] = entity['id'] + + session = entity.session + + # Create custom attribute by reconstructing it and update the + # value. This will prevent a create operation to be sent to the + # remote, as create operations for this entity type is not + # allowed. Instead an update operation will be recorded. + value = create_data.pop('value') + item = session.create( + reference, + create_data, + reconstructing=True + ) + + # Record update operation. 
+ item['value'] = value + + return item + + key_attribute = 'key' + value_attribute = 'value' + + return ftrack_api.attribute.KeyValueMappedCollectionAttribute( + name, creator, key_attribute, value_attribute, mutable=mutable + ) + + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that has no configuration ' + 'for reference {2}.', class_name, name, reference + )) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py new file mode 100644 index 00000000000..ae37922c515 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py @@ -0,0 +1,48 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class Job(ftrack_api.entity.base.Entity): + '''Represent job.''' + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + To set a job `description` visible in the web interface, *data* can + contain a key called `data` which should be a JSON serialised + dictionary containing description:: + + data = { + 'status': 'running', + 'data': json.dumps(dict(description='My job description.')), + ... + } + + Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` + is set to something not equal to "api_job". + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. + + ''' + + if not reconstructing: + if data.get('type') not in ('api_job', None): + raise ValueError( + 'Invalid job type "{0}". 
Must be "api_job"'.format( + data.get('type') + ) + ) + + super(Job, self).__init__( + session, data=data, reconstructing=reconstructing + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py new file mode 100644 index 00000000000..707f4fa6526 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py @@ -0,0 +1,733 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import collections +import functools + +import ftrack_api.entity.base +import ftrack_api.exception +import ftrack_api.event.base +import ftrack_api.symbol +import ftrack_api.inspection +from ftrack_api.logging import LazyLogMessage as L + + +class Location(ftrack_api.entity.base.Entity): + '''Represent storage for components.''' + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. 
+ + ''' + self.accessor = ftrack_api.symbol.NOT_SET + self.structure = ftrack_api.symbol.NOT_SET + self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET + self.priority = 95 + super(Location, self).__init__( + session, data=data, reconstructing=reconstructing + ) + + def __str__(self): + '''Return string representation of instance.''' + representation = super(Location, self).__str__() + + with self.session.auto_populating(False): + name = self['name'] + if name is not ftrack_api.symbol.NOT_SET: + representation = representation.replace( + '(', '("{0}", '.format(name) + ) + + return representation + + def add_component(self, component, source, recursive=True): + '''Add *component* to location. + + *component* should be a single component instance. + + *source* should be an instance of another location that acts as the + source. + + Raise :exc:`ftrack_api.ComponentInLocationError` if the *component* + already exists in this location. + + Raise :exc:`ftrack_api.LocationError` if managing data and the generated + target structure for the component already exists according to the + accessor. This helps prevent potential data loss by avoiding overwriting + existing data. Note that there is a race condition between the check and + the write so if another process creates data at the same target during + that period it will be overwritten. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the component registration. + + ''' + return self.add_components( + [component], sources=source, recursive=recursive + ) + + def add_components(self, components, sources, recursive=True, _depth=0): + '''Add *components* to location. + + *components* should be a list of component instances. + + *sources* may be either a single source or a list of sources. If a list + then each corresponding index in *sources* will be used for each + *component*. A source should be an instance of another location. 
+ + Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any + component in *components* already exists in this location. In this case, + no changes will be made and no data transferred. + + Raise :exc:`ftrack_api.exception.LocationError` if managing data and the + generated target structure for the component already exists according to + the accessor. This helps prevent potential data loss by avoiding + overwriting existing data. Note that there is a race condition between + the check and the write so if another process creates data at the same + target during that period it will be overwritten. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components registration. + + .. important:: + + If this location manages data then the *components* data is first + transferred to the target prescribed by the structure plugin, using + the configured accessor. If any component fails to transfer then + :exc:`ftrack_api.exception.LocationError` is raised and none of the + components are registered with the database. In this case it is left + up to the caller to decide and act on manually cleaning up any + transferred data using the 'transferred' detail in the raised error. + + Likewise, after transfer, all components are registered with the + database in a batch call. If any component causes an error then all + components will remain unregistered and + :exc:`ftrack_api.exception.LocationError` will be raised detailing + issues and any transferred data under the 'transferred' detail key. + + ''' + if ( + isinstance(sources, basestring) + or not isinstance(sources, collections.Sequence) + ): + sources = [sources] + + sources_count = len(sources) + if sources_count not in (1, len(components)): + raise ValueError( + 'sources must be either a single source or a sequence of ' + 'sources with indexes corresponding to passed components.' 
+ ) + + if not self.structure: + raise ftrack_api.exception.LocationError( + 'No structure defined for location {location}.', + details=dict(location=self) + ) + + if not components: + # Optimisation: Return early when no components to process, such as + # when called recursively on an empty sequence component. + return + + indent = ' ' * (_depth + 1) + + # Check that components not already added to location. + existing_components = [] + try: + self.get_resource_identifiers(components) + + except ftrack_api.exception.ComponentNotInLocationError as error: + missing_component_ids = [ + missing_component['id'] + for missing_component in error.details['components'] + ] + for component in components: + if component['id'] not in missing_component_ids: + existing_components.append(component) + + else: + existing_components.extend(components) + + if existing_components: + # Some of the components already present in location. + raise ftrack_api.exception.ComponentInLocationError( + existing_components, self + ) + + # Attempt to transfer each component's data to this location. + transferred = [] + + for index, component in enumerate(components): + try: + # Determine appropriate source. + if sources_count == 1: + source = sources[0] + else: + source = sources[index] + + # Add members first for container components. + is_container = 'members' in component.keys() + if is_container and recursive: + self.add_components( + component['members'], source, recursive=recursive, + _depth=(_depth + 1) + ) + + # Add component to this location. + context = self._get_context(component, source) + resource_identifier = self.structure.get_resource_identifier( + component, context + ) + + # Manage data transfer. 
+ self._add_data(component, resource_identifier, source) + + except Exception as error: + raise ftrack_api.exception.LocationError( + 'Failed to transfer component {component} data to location ' + '{location} due to error:\n{indent}{error}\n{indent}' + 'Transferred component data that may require cleanup: ' + '{transferred}', + details=dict( + indent=indent, + component=component, + location=self, + error=error, + transferred=transferred + ) + ) + + else: + transferred.append((component, resource_identifier)) + + # Register all successfully transferred components. + components_to_register = [] + component_resource_identifiers = [] + + try: + for component, resource_identifier in transferred: + if self.resource_identifier_transformer: + # Optionally encode resource identifier before storing. + resource_identifier = ( + self.resource_identifier_transformer.encode( + resource_identifier, + context={'component': component} + ) + ) + + components_to_register.append(component) + component_resource_identifiers.append(resource_identifier) + + # Store component in location information. + self._register_components_in_location( + components, component_resource_identifiers + ) + + except Exception as error: + raise ftrack_api.exception.LocationError( + 'Failed to register components with location {location} due to ' + 'error:\n{indent}{error}\n{indent}Transferred component data ' + 'that may require cleanup: {transferred}', + details=dict( + indent=indent, + location=self, + error=error, + transferred=transferred + ) + ) + + # Publish events. 
+ for component in components_to_register: + + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + + self.session.event_hub.publish( + ftrack_api.event.base.Event( + topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, + data=dict( + component_id=component_id, + location_id=location_id + ), + ), + on_error='ignore' + ) + + def _get_context(self, component, source): + '''Return context for *component* and *source*.''' + context = {} + if source: + try: + source_resource_identifier = source.get_resource_identifier( + component + ) + except ftrack_api.exception.ComponentNotInLocationError: + pass + else: + context.update(dict( + source_resource_identifier=source_resource_identifier + )) + + return context + + def _add_data(self, component, resource_identifier, source): + '''Manage transfer of *component* data from *source*. + + *resource_identifier* specifies the identifier to use with this + locations accessor. + + ''' + self.logger.debug(L( + 'Adding data for component {0!r} from source {1!r} to location ' + '{2!r} using resource identifier {3!r}.', + component, resource_identifier, source, self + )) + + # Read data from source and write to this location. + if not source.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for source location {location}.', + details=dict(location=source) + ) + + if not self.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for target location {location}.', + details=dict(location=self) + ) + + is_container = 'members' in component.keys() + if is_container: + # TODO: Improve this check. Possibly introduce an inspection + # such as ftrack_api.inspection.is_sequence_component. + if component.entity_type != 'SequenceComponent': + self.accessor.make_container(resource_identifier) + + else: + # Try to make container of component. 
+ try: + container = self.accessor.get_container( + resource_identifier + ) + + except ftrack_api.exception.AccessorParentResourceNotFoundError: + # Container could not be retrieved from + # resource_identifier. Assume that there is no need to + # make the container. + pass + + else: + # No need for existence check as make_container does not + # recreate existing containers. + self.accessor.make_container(container) + + if self.accessor.exists(resource_identifier): + # Note: There is a race condition here in that the + # data may be added externally between the check for + # existence and the actual write which would still + # result in potential data loss. However, there is no + # good cross platform, cross accessor solution for this + # at present. + raise ftrack_api.exception.LocationError( + 'Cannot add component as data already exists and ' + 'overwriting could result in data loss. Computed ' + 'target resource identifier was: {0}' + .format(resource_identifier) + ) + + # Read and write data. + source_data = source.accessor.open( + source.get_resource_identifier(component), 'rb' + ) + target_data = self.accessor.open(resource_identifier, 'wb') + + # Read/write data in chunks to avoid reading all into memory at the + # same time. + chunked_read = functools.partial( + source_data.read, ftrack_api.symbol.CHUNK_SIZE + ) + for chunk in iter(chunked_read, ''): + target_data.write(chunk) + + target_data.close() + source_data.close() + + def _register_component_in_location(self, component, resource_identifier): + '''Register *component* in location against *resource_identifier*.''' + return self._register_components_in_location( + [component], [resource_identifier] + ) + + def _register_components_in_location( + self, components, resource_identifiers + ): + '''Register *components* in location against *resource_identifiers*. + + Indices of *components* and *resource_identifiers* should align. 
+ + ''' + for component, resource_identifier in zip( + components, resource_identifiers + ): + self.session.create( + 'ComponentLocation', data=dict( + component=component, + location=self, + resource_identifier=resource_identifier + ) + ) + + self.session.commit() + + def remove_component(self, component, recursive=True): + '''Remove *component* from location. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the component deregistration. + + ''' + return self.remove_components([component], recursive=recursive) + + def remove_components(self, components, recursive=True): + '''Remove *components* from location. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components deregistration. + + ''' + for component in components: + # Check component is in this location + self.get_resource_identifier(component) + + # Remove members first for container components. + is_container = 'members' in component.keys() + if is_container and recursive: + self.remove_components( + component['members'], recursive=recursive + ) + + # Remove data. + self._remove_data(component) + + # Remove metadata. + self._deregister_component_in_location(component) + + # Emit event. 
+ component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + self.session.event_hub.publish( + ftrack_api.event.base.Event( + topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, + data=dict( + component_id=component_id, + location_id=location_id + ) + ), + on_error='ignore' + ) + + def _remove_data(self, component): + '''Remove data associated with *component*.''' + if not self.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for location {location}.', + details=dict(location=self) + ) + + try: + self.accessor.remove( + self.get_resource_identifier(component) + ) + except ftrack_api.exception.AccessorResourceNotFoundError: + # If accessor does not support detecting sequence paths then an + # AccessorResourceNotFoundError is raised. For now, if the + # component type is 'SequenceComponent' assume success. + if not component.entity_type == 'SequenceComponent': + raise + + def _deregister_component_in_location(self, component): + '''Deregister *component* from location.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + + # TODO: Use session.get for optimisation. + component_location = self.session.query( + 'ComponentLocation where component_id is {0} and location_id is ' + '{1}'.format(component_id, location_id) + )[0] + + self.session.delete(component_location) + + # TODO: Should auto-commit here be optional? + self.session.commit() + + def get_component_availability(self, component): + '''Return availability of *component* in this location as a float.''' + return self.session.get_component_availability( + component, locations=[self] + )[self['id']] + + def get_component_availabilities(self, components): + '''Return availabilities of *components* in this location. + + Return list of float values corresponding to each component. 
+ + ''' + return [ + availability[self['id']] for availability in + self.session.get_component_availabilities( + components, locations=[self] + ) + ] + + def get_resource_identifier(self, component): + '''Return resource identifier for *component*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the + component is not present in this location. + + ''' + return self.get_resource_identifiers([component])[0] + + def get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the components are not present in this location. + + ''' + resource_identifiers = self._get_resource_identifiers(components) + + # Optionally decode resource identifier. + if self.resource_identifier_transformer: + for index, resource_identifier in enumerate(resource_identifiers): + resource_identifiers[index] = ( + self.resource_identifier_transformer.decode( + resource_identifier, + context={'component': components[index]} + ) + ) + + return resource_identifiers + + def _get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the components are not present in this location. 
+ + ''' + component_ids_mapping = collections.OrderedDict() + for component in components: + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + component_ids_mapping[component_id] = component + + component_locations = self.session.query( + 'select component_id, resource_identifier from ComponentLocation ' + 'where location_id is {0} and component_id in ({1})' + .format( + ftrack_api.inspection.primary_key(self).values()[0], + ', '.join(component_ids_mapping.keys()) + ) + ) + + resource_identifiers_map = {} + for component_location in component_locations: + resource_identifiers_map[component_location['component_id']] = ( + component_location['resource_identifier'] + ) + + resource_identifiers = [] + missing = [] + for component_id, component in component_ids_mapping.items(): + if component_id not in resource_identifiers_map: + missing.append(component) + else: + resource_identifiers.append( + resource_identifiers_map[component_id] + ) + + if missing: + raise ftrack_api.exception.ComponentNotInLocationError( + missing, self + ) + + return resource_identifiers + + def get_filesystem_path(self, component): + '''Return filesystem path for *component*.''' + return self.get_filesystem_paths([component])[0] + + def get_filesystem_paths(self, components): + '''Return filesystem paths for *components*.''' + resource_identifiers = self.get_resource_identifiers(components) + + filesystem_paths = [] + for resource_identifier in resource_identifiers: + filesystem_paths.append( + self.accessor.get_filesystem_path(resource_identifier) + ) + + return filesystem_paths + + def get_url(self, component): + '''Return url for *component*. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *component* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by the location's accessor. 
+ ''' + resource_identifier = self.get_resource_identifier(component) + + return self.accessor.get_url(resource_identifier) + + +class MemoryLocationMixin(object): + '''Represent storage for components. + + Unlike a standard location, only store metadata for components in this + location in memory rather than persisting to the database. + + ''' + + @property + def _cache(self): + '''Return cache.''' + try: + cache = self.__cache + except AttributeError: + cache = self.__cache = {} + + return cache + + def _register_component_in_location(self, component, resource_identifier): + '''Register *component* in location with *resource_identifier*.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + self._cache[component_id] = resource_identifier + + def _register_components_in_location( + self, components, resource_identifiers + ): + '''Register *components* in location against *resource_identifiers*. + + Indices of *components* and *resource_identifiers* should align. + + ''' + for component, resource_identifier in zip( + components, resource_identifiers + ): + self._register_component_in_location(component, resource_identifier) + + def _deregister_component_in_location(self, component): + '''Deregister *component* in location.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + self._cache.pop(component_id) + + def _get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the referenced components are not present in this location. 
+ + ''' + resource_identifiers = [] + missing = [] + for component in components: + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + resource_identifier = self._cache.get(component_id) + if resource_identifier is None: + missing.append(component) + else: + resource_identifiers.append(resource_identifier) + + if missing: + raise ftrack_api.exception.ComponentNotInLocationError( + missing, self + ) + + return resource_identifiers + + +class UnmanagedLocationMixin(object): + '''Location that does not manage data.''' + + def _add_data(self, component, resource_identifier, source): + '''Manage transfer of *component* data from *source*. + + *resource_identifier* specifies the identifier to use with this + locations accessor. + + Overridden to have no effect. + + ''' + return + + def _remove_data(self, component): + '''Remove data associated with *component*. + + Overridden to have no effect. + + ''' + return + + +class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): + '''Special origin location that expects sources as filepaths.''' + + def _get_context(self, component, source): + '''Return context for *component* and *source*.''' + context = {} + if source: + context.update(dict( + source_resource_identifier=source + )) + + return context + + +class ServerLocationMixin(object): + '''Location representing ftrack server. + + Adds convenience methods to location, specific to ftrack server. + ''' + def get_thumbnail_url(self, component, size=None): + '''Return thumbnail url for *component*. + + Optionally, specify *size* to constrain the downscaled image to size + x size pixels. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by the location's accessor. 
+ ''' + resource_identifier = self.get_resource_identifier(component) + return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py new file mode 100644 index 00000000000..f5a9403728b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py @@ -0,0 +1,105 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import warnings + +import ftrack_api.entity.base + + +class Note(ftrack_api.entity.base.Entity): + '''Represent a note.''' + + def create_reply( + self, content, author + ): + '''Create a reply with *content* and *author*. + + .. note:: + + This is a helper method. To create replies manually use the + standard :meth:`Session.create` method. + + ''' + reply = self.session.create( + 'Note', { + 'author': author, + 'content': content + } + ) + + self['replies'].append(reply) + + return reply + + +class CreateNoteMixin(object): + '''Mixin to add create_note method on entity class.''' + + def create_note( + self, content, author, recipients=None, category=None, labels=None + ): + '''Create note with *content*, *author*. + + NoteLabels can be set by including *labels*. + + Note category can be set by including *category*. + + *recipients* can be specified as a list of user or group instances. + + ''' + note_label_support = 'NoteLabel' in self.session.types + + if not labels: + labels = [] + + if labels and not note_label_support: + raise ValueError( + 'NoteLabel is not supported by the current server version.' + ) + + if category and labels: + raise ValueError( + 'Both category and labels cannot be set at the same time.' 
+ ) + + if not recipients: + recipients = [] + + data = { + 'content': content, + 'author': author + } + + if category: + if note_label_support: + labels = [category] + warnings.warn( + 'category argument will be removed in an upcoming version, ' + 'please use labels instead.', + PendingDeprecationWarning + ) + else: + data['category_id'] = category['id'] + + note = self.session.create('Note', data) + + self['notes'].append(note) + + for resource in recipients: + recipient = self.session.create('Recipient', { + 'note_id': note['id'], + 'resource_id': resource['id'] + }) + + note['recipients'].append(recipient) + + for label in labels: + self.session.create( + 'NoteLabelLink', + { + 'label_id': label['id'], + 'note_id': note['id'] + } + ) + + return note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py new file mode 100644 index 00000000000..ec6db7c0196 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py @@ -0,0 +1,94 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class ProjectSchema(ftrack_api.entity.base.Entity): + '''Class representing ProjectSchema.''' + + def get_statuses(self, schema, type_id=None): + '''Return statuses for *schema* and optional *type_id*. + + *type_id* is the id of the Type for a TypedContext and can be used to + get statuses where the workflow has been overridden. + + ''' + # Task has overrides and need to be handled separately. 
+ if schema == 'Task': + if type_id is not None: + overrides = self['_overrides'] + for override in overrides: + if override['type_id'] == type_id: + return override['workflow_schema']['statuses'][:] + + return self['_task_workflow']['statuses'][:] + + elif schema == 'AssetVersion': + return self['_version_workflow']['statuses'][:] + + else: + try: + EntityTypeClass = self.session.types[schema] + except KeyError: + raise ValueError('Schema {0} does not exist.'.format(schema)) + + object_type_id_attribute = EntityTypeClass.attributes.get( + 'object_type_id' + ) + + try: + object_type_id = object_type_id_attribute.default_value + except AttributeError: + raise ValueError( + 'Schema {0} does not have statuses.'.format(schema) + ) + + for _schema in self['_schemas']: + if _schema['type_id'] == object_type_id: + result = self.session.query( + 'select task_status from SchemaStatus ' + 'where schema_id is {0}'.format(_schema['id']) + ) + return [ + schema_type['task_status'] for schema_type in result + ] + + raise ValueError( + 'No valid statuses were found for schema {0}.'.format(schema) + ) + + def get_types(self, schema): + '''Return types for *schema*.''' + # Task need to be handled separately. 
+ if schema == 'Task': + return self['_task_type_schema']['types'][:] + + else: + try: + EntityTypeClass = self.session.types[schema] + except KeyError: + raise ValueError('Schema {0} does not exist.'.format(schema)) + + object_type_id_attribute = EntityTypeClass.attributes.get( + 'object_type_id' + ) + + try: + object_type_id = object_type_id_attribute.default_value + except AttributeError: + raise ValueError( + 'Schema {0} does not have types.'.format(schema) + ) + + for _schema in self['_schemas']: + if _schema['type_id'] == object_type_id: + result = self.session.query( + 'select task_type from SchemaType ' + 'where schema_id is {0}'.format(_schema['id']) + ) + return [schema_type['task_type'] for schema_type in result] + + raise ValueError( + 'No valid types were found for schema {0}.'.format(schema) + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py new file mode 100644 index 00000000000..511ad4ba999 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py @@ -0,0 +1,123 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import arrow + +import ftrack_api.entity.base +import ftrack_api.exception + + +class User(ftrack_api.entity.base.Entity): + '''Represent a user.''' + + def start_timer(self, context=None, comment='', name=None, force=False): + '''Start a timer for *context* and return it. + + *force* can be used to automatically stop an existing timer and create a + timelog for it. If you need to get access to the created timelog, use + :func:`stop_timer` instead. + + *comment* and *name* are optional but will be set on the timer. + + .. 
note:: + + This method will automatically commit the changes and if *force* is + False then it will fail with a + :class:`ftrack_api.exception.NotUniqueError` exception if a + timer is already running. + + ''' + if force: + try: + self.stop_timer() + except ftrack_api.exception.NoResultFoundError: + self.logger.debug('Failed to stop existing timer.') + + timer = self.session.create('Timer', { + 'user': self, + 'context': context, + 'name': name, + 'comment': comment + }) + + # Commit the new timer and try to catch any error that indicate another + # timelog already exists and inform the user about it. + try: + self.session.commit() + except ftrack_api.exception.ServerError as error: + if 'IntegrityError' in str(error): + raise ftrack_api.exception.NotUniqueError( + ('Failed to start a timelog for user with id: {0}, it is ' + 'likely that a timer is already running. Either use ' + 'force=True or stop the timer first.').format(self['id']) + ) + else: + # Reraise the error as it might be something unrelated. + raise + + return timer + + def stop_timer(self): + '''Stop the current timer and return a timelog created from it. + + If a timer is not running, a + :exc:`ftrack_api.exception.NoResultFoundError` exception will be + raised. + + .. note:: + + This method will automatically commit the changes. + + ''' + timer = self.session.query( + 'Timer where user_id = "{0}"'.format(self['id']) + ).one() + + # If the server is running in the same timezone as the local + # timezone, we remove the TZ offset to get the correct duration. + is_timezone_support_enabled = self.session.server_information.get( + 'is_timezone_support_enabled', None + ) + if is_timezone_support_enabled is None: + self.logger.warning( + 'Could not identify if server has timezone support enabled. ' + 'Will assume server is running in UTC.' 
+            )
+            is_timezone_support_enabled = True
+
+        if is_timezone_support_enabled:
+            now = arrow.now()
+        else:
+            now = arrow.now().replace(tzinfo='utc')
+
+        delta = now - timer['start']
+        duration = delta.days * 24 * 60 * 60 + delta.seconds
+
+        timelog = self.session.create('Timelog', {
+            'user_id': timer['user_id'],
+            'context_id': timer['context_id'],
+            'comment': timer['comment'],
+            'start': timer['start'],
+            'duration': duration,
+            'name': timer['name']
+        })
+
+        self.session.delete(timer)
+        self.session.commit()
+
+        return timelog
+
+    def send_invite(self):
+        '''Send an invitation email to the user.'''
+
+        self.session.send_user_invite(
+            self
+        )
+    def reset_api_key(self):
+        '''Reset the users api key.'''
+
+        response = self.session.reset_remote(
+            'api_key', entity=self
+        )
+
+        return response['api_key']
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py
new file mode 100644
index 00000000000..1aab07ed77a
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py
@@ -0,0 +1,2 @@
+# :coding: utf-8
+# :copyright: Copyright (c) 2014 ftrack
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py
new file mode 100644
index 00000000000..b5fd57da784
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py
@@ -0,0 +1,85 @@
+# :coding: utf-8
+# :copyright: Copyright (c) 2014 ftrack
+
+import uuid
+import collections
+
+
+class Event(collections.MutableMapping):
+    '''Represent a single event.'''
+
+    def __init__(self, topic, id=None, data=None, sent=None,
+                 source=None, target='', 
in_reply_to_event=None): + '''Initialise event. + + *topic* is the required topic for the event. It can use a dotted + notation to demarcate groupings. For example, 'ftrack.update'. + + *id* is the unique id for this event instance. It is primarily used when + replying to an event. If not supplied a default uuid based value will + be used. + + *data* refers to event specific data. It should be a mapping structure + and defaults to an empty dictionary if not supplied. + + *sent* is the timestamp the event is sent. It will be set automatically + as send time unless specified here. + + *source* is information about where the event originated. It should be + a mapping and include at least a unique id value under an 'id' key. If + not specified, senders usually populate the value automatically at + publish time. + + *target* can be an expression that targets this event. For example, + a reply event would target the event to the sender of the source event. + The expression will be tested against subscriber information only. + + *in_reply_to_event* is used when replying to an event and should contain + the unique id of the event being replied to. 
+ + ''' + super(Event, self).__init__() + self._data = dict( + id=id or uuid.uuid4().hex, + data=data or {}, + topic=topic, + sent=sent, + source=source or {}, + target=target, + in_reply_to_event=in_reply_to_event + ) + self._stopped = False + + def stop(self): + '''Stop further processing of this event.''' + self._stopped = True + + def is_stopped(self): + '''Return whether event has been stopped.''' + return self._stopped + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, str(self._data) + ) + + def __getitem__(self, key): + '''Return value for *key*.''' + return self._data[key] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + self._data[key] = value + + def __delitem__(self, key): + '''Remove *key*.''' + del self._data[key] + + def __iter__(self): + '''Iterate over all keys.''' + return iter(self._data) + + def __len__(self): + '''Return count of keys.''' + return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py new file mode 100644 index 00000000000..0535e4fd5f1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py @@ -0,0 +1,282 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from operator import eq, ne, ge, le, gt, lt + +from pyparsing import (Group, Word, CaselessKeyword, Forward, + FollowedBy, Suppress, oneOf, OneOrMore, Optional, + alphanums, quotedString, removeQuotes) + +import ftrack_api.exception + +# Do not enable packrat since it is not thread-safe and will result in parsing +# exceptions in a multi threaded environment. 
+# ParserElement.enablePackrat() + + +class Parser(object): + '''Parse string based expression into :class:`Expression` instance.''' + + def __init__(self): + '''Initialise parser.''' + self._operators = { + '=': eq, + '!=': ne, + '>=': ge, + '<=': le, + '>': gt, + '<': lt + } + self._parser = self._construct_parser() + super(Parser, self).__init__() + + def _construct_parser(self): + '''Construct and return parser.''' + field = Word(alphanums + '_.') + operator = oneOf(self._operators.keys()) + value = Word(alphanums + '-_,./*@+') + quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) + + condition = Group( + field + operator + (quoted_value | value) + )('condition') + + not_ = Optional(Suppress(CaselessKeyword('not')))('not') + and_ = Suppress(CaselessKeyword('and'))('and') + or_ = Suppress(CaselessKeyword('or'))('or') + + expression = Forward() + parenthesis = Suppress('(') + expression + Suppress(')') + previous = condition | parenthesis + + for conjunction in (not_, and_, or_): + current = Forward() + + if conjunction in (and_, or_): + conjunction_expression = ( + FollowedBy(previous + conjunction + previous) + + Group( + previous + OneOrMore(conjunction + previous) + )(conjunction.resultsName) + ) + + elif conjunction in (not_, ): + conjunction_expression = ( + FollowedBy(conjunction.expr + current) + + Group(conjunction + current)(conjunction.resultsName) + ) + + else: # pragma: no cover + raise ValueError('Unrecognised conjunction.') + + current <<= (conjunction_expression | previous) + previous = current + + expression <<= previous + return expression('expression') + + def parse(self, expression): + '''Parse string *expression* into :class:`Expression`. + + Raise :exc:`ftrack_api.exception.ParseError` if *expression* could + not be parsed. 
+ + ''' + result = None + expression = expression.strip() + if expression: + try: + result = self._parser.parseString( + expression, parseAll=True + ) + except Exception as error: + raise ftrack_api.exception.ParseError( + 'Failed to parse: {0}. {1}'.format(expression, error) + ) + + return self._process(result) + + def _process(self, result): + '''Process *result* using appropriate method. + + Method called is determined by the name of the result. + + ''' + method_name = '_process_{0}'.format(result.getName()) + method = getattr(self, method_name) + return method(result) + + def _process_expression(self, result): + '''Process *result* as expression.''' + return self._process(result[0]) + + def _process_not(self, result): + '''Process *result* as NOT operation.''' + return Not(self._process(result[0])) + + def _process_and(self, result): + '''Process *result* as AND operation.''' + return All([self._process(entry) for entry in result]) + + def _process_or(self, result): + '''Process *result* as OR operation.''' + return Any([self._process(entry) for entry in result]) + + def _process_condition(self, result): + '''Process *result* as condition.''' + key, operator, value = result + return Condition(key, self._operators[operator], value) + + def _process_quoted_value(self, result): + '''Process *result* as quoted value.''' + return result + + +class Expression(object): + '''Represent a structured expression to test candidates against.''' + + def __str__(self): + '''Return string representation.''' + return '<{0}>'.format(self.__class__.__name__) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return True + + +class All(Expression): + '''Match candidate that matches all of the specified expressions. + + .. note:: + + If no expressions are supplied then will always match. 
+ + ''' + + def __init__(self, expressions=None): + '''Initialise with list of *expressions* to match against.''' + self._expressions = expressions or [] + super(All, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} [{1}]>'.format( + self.__class__.__name__, + ' '.join(map(str, self._expressions)) + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return all([ + expression.match(candidate) for expression in self._expressions + ]) + + +class Any(Expression): + '''Match candidate that matches any of the specified expressions. + + .. note:: + + If no expressions are supplied then will never match. + + ''' + + def __init__(self, expressions=None): + '''Initialise with list of *expressions* to match against.''' + self._expressions = expressions or [] + super(Any, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} [{1}]>'.format( + self.__class__.__name__, + ' '.join(map(str, self._expressions)) + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return any([ + expression.match(candidate) for expression in self._expressions + ]) + + +class Not(Expression): + '''Negate expression.''' + + def __init__(self, expression): + '''Initialise with *expression* to negate.''' + self._expression = expression + super(Not, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, + self._expression + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return not self._expression.match(candidate) + + +class Condition(Expression): + '''Represent condition.''' + + def __init__(self, key, operator, value): + '''Initialise condition. + + *key* is the key to check on the data when matching. It can be a nested + key represented by dots. 
For example, 'data.eventType' would attempt to + match candidate['data']['eventType']. If the candidate is missing any + of the requested keys then the match fails immediately. + + *operator* is the operator function to use to perform the match between + the retrieved candidate value and the conditional *value*. + + If *value* is a string, it can use a wildcard '*' at the end to denote + that any values matching the substring portion are valid when matching + equality only. + + ''' + self._key = key + self._operator = operator + self._value = value + self._wildcard = '*' + self._operatorMapping = { + eq: '=', + ne: '!=', + ge: '>=', + le: '<=', + gt: '>', + lt: '<' + } + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}{2}{3}>'.format( + self.__class__.__name__, + self._key, + self._operatorMapping.get(self._operator, self._operator), + self._value + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + key_parts = self._key.split('.') + + try: + value = candidate + for keyPart in key_parts: + value = value[keyPart] + except (KeyError, TypeError): + return False + + if ( + self._operator is eq + and isinstance(self._value, basestring) + and self._value[-1] == self._wildcard + ): + return self._value[:-1] in value + else: + return self._operator(value, self._value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py new file mode 100644 index 00000000000..9f4ba80c6ef --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py @@ -0,0 +1,1091 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +from __future__ import absolute_import + +import collections +import urlparse +import threading +import Queue as queue +import logging +import time +import uuid 
+import operator
+import functools
+import json
+import socket
+import warnings
+
+import requests
+import requests.exceptions
+import websocket
+
+import ftrack_api.exception
+import ftrack_api.event.base
+import ftrack_api.event.subscriber
+import ftrack_api.event.expression
+from ftrack_api.logging import LazyLogMessage as L
+
+
+SocketIoSession = collections.namedtuple('SocketIoSession', [
+    'id',
+    'heartbeatTimeout',
+    'supportedTransports',
+])
+
+
+ServerDetails = collections.namedtuple('ServerDetails', [
+    'scheme',
+    'hostname',
+    'port',
+])
+
+
+
+
+class EventHub(object):
+    '''Manage routing of events.'''
+
+    _future_signature_warning = (
+        'When constructing your Session object you did not explicitly define '
+        'auto_connect_event_hub as True even though you appear to be publishing '
+        'and / or subscribing to asynchronous events. In version 2.0 of '
+        'the ftrack-python-api the default behavior will change from True '
+        'to False. Please make sure to update your tools. You can read more at '
+        'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html'
+    )
+
+    def __init__(self, server_url, api_user, api_key):
+        '''Initialise hub, connecting to ftrack *server_url*.
+
+        *api_user* is the user to authenticate as and *api_key* is the API key
+        to authenticate with.
+
+        '''
+        super(EventHub, self).__init__()
+        self.logger = logging.getLogger(
+            __name__ + '.' + self.__class__.__name__
+        )
+        self.id = uuid.uuid4().hex
+        self._connection = None
+
+        self._unique_packet_id = 0
+        self._packet_callbacks = {}
+        self._lock = threading.RLock()
+
+        self._wait_timeout = 4
+
+        self._subscribers = []
+        self._reply_callbacks = {}
+        self._intentional_disconnect = False
+
+        self._event_queue = queue.Queue()
+        self._event_namespace = 'ftrack.event'
+        self._expression_parser = ftrack_api.event.expression.Parser()
+
+        # Default values for auto reconnection timeout on unintentional
+        # disconnection. Equates to 5 minutes.
+ self._auto_reconnect_attempts = 30 + self._auto_reconnect_delay = 10 + + self._deprecation_warning_auto_connect = False + + # Mapping of Socket.IO codes to meaning. + self._code_name_mapping = { + '0': 'disconnect', + '1': 'connect', + '2': 'heartbeat', + '3': 'message', + '4': 'json', + '5': 'event', + '6': 'acknowledge', + '7': 'error' + } + self._code_name_mapping.update( + dict((name, code) for code, name in self._code_name_mapping.items()) + ) + + self._server_url = server_url + self._api_user = api_user + self._api_key = api_key + + # Parse server URL and store server details. + url_parse_result = urlparse.urlparse(self._server_url) + if not url_parse_result.scheme: + raise ValueError('Could not determine scheme from server url.') + + if not url_parse_result.hostname: + raise ValueError('Could not determine hostname from server url.') + + self.server = ServerDetails( + url_parse_result.scheme, + url_parse_result.hostname, + url_parse_result.port + ) + + def get_server_url(self): + '''Return URL to server.''' + return '{0}://{1}'.format( + self.server.scheme, self.get_network_location() + ) + + def get_network_location(self): + '''Return network location part of url (hostname with optional port).''' + if self.server.port: + return '{0}:{1}'.format(self.server.hostname, self.server.port) + else: + return self.server.hostname + + @property + def secure(self): + '''Return whether secure connection used.''' + return self.server.scheme == 'https' + + def connect(self): + '''Initialise connection to server. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already + connected or connection fails. + + ''' + + self._deprecation_warning_auto_connect = False + + if self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Already connected.' + ) + + # Reset flag tracking whether disconnection was intentional. + self._intentional_disconnect = False + + try: + # Connect to socket.io server using websocket transport. 
+ session = self._get_socket_io_session() + + if 'websocket' not in session.supportedTransports: + raise ValueError( + 'Server does not support websocket sessions.' + ) + + scheme = 'wss' if self.secure else 'ws' + url = '{0}://{1}/socket.io/1/websocket/{2}'.format( + scheme, self.get_network_location(), session.id + ) + + # timeout is set to 60 seconds to avoid the issue where the socket + # ends up in a bad state where it is reported as connected but the + # connection has been closed. The issue happens often when connected + # to a secure socket and the computer goes to sleep. + # More information on how the timeout works can be found here: + # https://docs.python.org/2/library/socket.html#socket.socket.setblocking + self._connection = websocket.create_connection(url, timeout=60) + + except Exception as error: + error_message = ( + 'Failed to connect to event server at {server_url} with ' + 'error: "{error}".' + ) + + error_details = { + 'error': unicode(error), + 'server_url': self.get_server_url() + } + + self.logger.debug( + L( + error_message, **error_details + ), + exc_info=1 + ) + raise ftrack_api.exception.EventHubConnectionError( + error_message, + details=error_details + ) + + # Start background processing thread. + self._processor_thread = _ProcessorThread(self) + self._processor_thread.start() + + # Subscribe to reply events if not already. Note: Only adding the + # subscriber locally as the following block will notify server of all + # existing subscribers, which would cause the server to report a + # duplicate subscriber error if EventHub.subscribe was called here. + try: + self._add_subscriber( + 'topic=ftrack.meta.reply', + self._handle_reply, + subscriber=dict( + id=self.id + ) + ) + except ftrack_api.exception.NotUniqueError: + pass + + # Now resubscribe any existing stored subscribers. This can happen when + # reconnecting automatically for example. 
+ for subscriber in self._subscribers[:]: + self._notify_server_about_subscriber(subscriber) + + @property + def connected(self): + '''Return if connected.''' + return self._connection is not None and self._connection.connected + + def disconnect(self, unsubscribe=True): + '''Disconnect from server. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not + currently connected. + + If *unsubscribe* is True then unsubscribe all current subscribers + automatically before disconnecting. + + ''' + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Not currently connected.' + ) + + else: + # Set flag to indicate disconnection was intentional. + self._intentional_disconnect = True + + # Set blocking to true on socket to make sure unsubscribe events + # are emitted before closing the connection. + self._connection.sock.setblocking(1) + + # Unsubscribe all subscribers. + if unsubscribe: + for subscriber in self._subscribers[:]: + self.unsubscribe(subscriber.metadata['id']) + + # Now disconnect. + self._connection.close() + self._connection = None + + # Shutdown background processing thread. + self._processor_thread.cancel() + + # Join to it if it is not current thread to help ensure a clean + # shutdown. + if threading.current_thread() != self._processor_thread: + self._processor_thread.join(self._wait_timeout) + + def reconnect(self, attempts=10, delay=5): + '''Reconnect to server. + + Make *attempts* number of attempts with *delay* in seconds between each + attempt. + + .. note:: + + All current subscribers will be automatically resubscribed after + successful reconnection. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to + reconnect. 
+ + ''' + try: + self.disconnect(unsubscribe=False) + except ftrack_api.exception.EventHubConnectionError: + pass + + for attempt in range(attempts): + self.logger.debug(L( + 'Reconnect attempt {0} of {1}', attempt, attempts + )) + + # Silence logging temporarily to avoid lots of failed connection + # related information. + try: + logging.disable(logging.CRITICAL) + + try: + self.connect() + except ftrack_api.exception.EventHubConnectionError: + time.sleep(delay) + else: + break + + finally: + logging.disable(logging.NOTSET) + + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to reconnect to event server at {0} after {1} attempts.' + .format(self.get_server_url(), attempts) + ) + + def wait(self, duration=None): + '''Wait for events and handle as they arrive. + + If *duration* is specified, then only process events until duration is + reached. *duration* is in seconds though float values can be used for + smaller values. + + ''' + started = time.time() + + while True: + try: + event = self._event_queue.get(timeout=0.1) + except queue.Empty: + pass + else: + self._handle(event) + + # Additional special processing of events. + if event['topic'] == 'ftrack.meta.disconnected': + break + + if duration is not None: + if (time.time() - started) > duration: + break + + def get_subscriber_by_identifier(self, identifier): + '''Return subscriber with matching *identifier*. + + Return None if no subscriber with *identifier* found. + + ''' + for subscriber in self._subscribers[:]: + if subscriber.metadata.get('id') == identifier: + return subscriber + + return None + + def subscribe(self, subscription, callback, subscriber=None, priority=100): + '''Register *callback* for *subscription*. + + A *subscription* is a string that can specify in detail which events the + callback should receive. The filtering is applied against each event + object. Nested references are supported using '.' separators. 
+ For example, 'topic=foo and data.eventType=Shot' would match the + following event:: + + + + The *callback* should accept an instance of + :class:`ftrack_api.event.base.Event` as its sole argument. + + Callbacks are called in order of *priority*. The lower the priority + number the sooner it will be called, with 0 being the first. The + default priority is 100. Note that priority only applies against other + callbacks registered with this hub and not as a global priority. + + An earlier callback can prevent processing of subsequent callbacks by + calling :meth:`Event.stop` on the passed `event` before + returning. + + .. warning:: + + Handlers block processing of other received events. For long + running callbacks it is advisable to delegate the main work to + another process or thread. + + A *callback* can be attached to *subscriber* information that details + the subscriber context. A subscriber context will be generated + automatically if not supplied. + + .. note:: + + The subscription will be stored locally, but until the server + receives notification of the subscription it is possible the + callback will not be called. + + Return subscriber identifier. + + Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with + the same identifier already exists. + + ''' + # Add subscriber locally. + subscriber = self._add_subscriber( + subscription, callback, subscriber, priority + ) + + # Notify server now if possible. + try: + self._notify_server_about_subscriber(subscriber) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug(L( + 'Failed to notify server about new subscriber {0} ' + 'as server not currently reachable.', subscriber.metadata['id'] + )) + + return subscriber.metadata['id'] + + def _add_subscriber( + self, subscription, callback, subscriber=None, priority=100 + ): + '''Add subscriber locally. + + See :meth:`subscribe` for argument descriptions. + + Return :class:`ftrack_api.event.subscriber.Subscriber` instance. 
+ + Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with + the same identifier already exists. + + ''' + if subscriber is None: + subscriber = {} + + subscriber.setdefault('id', uuid.uuid4().hex) + + # Check subscriber not already subscribed. + existing_subscriber = self.get_subscriber_by_identifier( + subscriber['id'] + ) + + if existing_subscriber is not None: + raise ftrack_api.exception.NotUniqueError( + 'Subscriber with identifier {0} already exists.' + .format(subscriber['id']) + ) + + subscriber = ftrack_api.event.subscriber.Subscriber( + subscription=subscription, + callback=callback, + metadata=subscriber, + priority=priority + ) + + self._subscribers.append(subscriber) + + return subscriber + + def _notify_server_about_subscriber(self, subscriber): + '''Notify server of new *subscriber*.''' + subscribe_event = ftrack_api.event.base.Event( + topic='ftrack.meta.subscribe', + data=dict( + subscriber=subscriber.metadata, + subscription=str(subscriber.subscription) + ) + ) + + self._publish( + subscribe_event, + callback=functools.partial(self._on_subscribed, subscriber) + ) + + def _on_subscribed(self, subscriber, response): + '''Handle acknowledgement of subscription.''' + if response.get('success') is False: + self.logger.warning(L( + 'Server failed to subscribe subscriber {0}: {1}', + subscriber.metadata['id'], response.get('message') + )) + + def unsubscribe(self, subscriber_identifier): + '''Unsubscribe subscriber with *subscriber_identifier*. + + .. note:: + + If the server is not reachable then it won't be notified of the + unsubscription. However, the subscriber will be removed locally + regardless. 
+ + ''' + subscriber = self.get_subscriber_by_identifier(subscriber_identifier) + + if subscriber is None: + raise ftrack_api.exception.NotFoundError( + 'Cannot unsubscribe missing subscriber with identifier {0}' + .format(subscriber_identifier) + ) + + self._subscribers.pop(self._subscribers.index(subscriber)) + + # Notify the server if possible. + unsubscribe_event = ftrack_api.event.base.Event( + topic='ftrack.meta.unsubscribe', + data=dict(subscriber=subscriber.metadata) + ) + + try: + self._publish( + unsubscribe_event, + callback=functools.partial(self._on_unsubscribed, subscriber) + ) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug(L( + 'Failed to notify server to unsubscribe subscriber {0} as ' + 'server not currently reachable.', subscriber.metadata['id'] + )) + + def _on_unsubscribed(self, subscriber, response): + '''Handle acknowledgement of unsubscribing *subscriber*.''' + if response.get('success') is not True: + self.logger.warning(L( + 'Server failed to unsubscribe subscriber {0}: {1}', + subscriber.metadata['id'], response.get('message') + )) + + def _prepare_event(self, event): + '''Prepare *event* for sending.''' + event['source'].setdefault('id', self.id) + event['source'].setdefault('user', { + 'username': self._api_user + }) + + def _prepare_reply_event(self, event, source_event, source=None): + '''Prepare *event* as a reply to another *source_event*. + + Modify *event*, setting appropriate values to target event correctly as + a reply. + + ''' + event['target'] = 'id={0}'.format(source_event['source']['id']) + event['in_reply_to_event'] = source_event['id'] + if source is not None: + event['source'] = source + + def publish( + self, event, synchronous=False, on_reply=None, on_error='raise' + ): + '''Publish *event*. + + If *synchronous* is specified as True then this method will wait and + return a list of results from any called callbacks. + + .. 
note:: + + Currently, if synchronous is True then only locally registered + callbacks will be called and no event will be sent to the server. + This may change in future. + + *on_reply* is an optional callable to call with any reply event that is + received in response to the published *event*. + + .. note:: + + Will not be called when *synchronous* is True. + + If *on_error* is set to 'ignore' then errors raised during publish of + event will be caught by this method and ignored. + + ''' + if self._deprecation_warning_auto_connect and not synchronous: + warnings.warn( + self._future_signature_warning, FutureWarning + ) + + try: + return self._publish( + event, synchronous=synchronous, on_reply=on_reply + ) + except Exception: + if on_error == 'ignore': + pass + else: + raise + + def publish_reply(self, source_event, data, source=None): + '''Publish a reply event to *source_event* with supplied *data*. + + If *source* is specified it will be used for the source value of the + sent event. + + ''' + reply_event = ftrack_api.event.base.Event( + 'ftrack.meta.reply', + data=data + ) + self._prepare_reply_event(reply_event, source_event, source=source) + self.publish(reply_event) + + def _publish(self, event, synchronous=False, callback=None, on_reply=None): + '''Publish *event*. + + If *synchronous* is specified as True then this method will wait and + return a list of results from any called callbacks. + + .. note:: + + Currently, if synchronous is True then only locally registered + callbacks will be called and no event will be sent to the server. + This may change in future. + + A *callback* can also be specified. This callback will be called once + the server acknowledges receipt of the sent event. A default callback + that checks for errors from the server will be used if not specified. + + *on_reply* is an optional callable to call with any reply event that is + received in response to the published *event*. 
Note that there is no + guarantee that a reply will be sent. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not + currently connected. + + ''' + # Prepare event adding any relevant additional information. + self._prepare_event(event) + + if synchronous: + # Bypass emitting event to server and instead call locally + # registered handlers directly, collecting and returning results. + return self._handle(event, synchronous=synchronous) + + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Cannot publish event asynchronously as not connected to ' + 'server.' + ) + + # Use standard callback if none specified. + if callback is None: + callback = functools.partial(self._on_published, event) + + # Emit event to central server for asynchronous processing. + try: + # Register on reply callback if specified. + if on_reply is not None: + # TODO: Add cleanup process that runs after a set duration to + # garbage collect old reply callbacks and prevent dictionary + # growing too large. + self._reply_callbacks[event['id']] = on_reply + + try: + self._emit_event_packet( + self._event_namespace, event, callback=callback + ) + except ftrack_api.exception.EventHubConnectionError: + # Connection may have dropped temporarily. Wait a few moments to + # see if background thread reconnects automatically. + time.sleep(15) + + self._emit_event_packet( + self._event_namespace, event, callback=callback + ) + except: + raise + + except Exception: + # Failure to send event should not cause caller to fail. + # TODO: This behaviour is inconsistent with the failing earlier on + # lack of connection and also with the error handling parameter of + # EventHub.publish. Consider refactoring. 
+ self.logger.exception(L('Error sending event {0}.', event)) + + def _on_published(self, event, response): + '''Handle acknowledgement of published event.''' + if response.get('success', False) is False: + self.logger.error(L( + 'Server responded with error while publishing event {0}. ' + 'Error was: {1}', event, response.get('message') + )) + + def _handle(self, event, synchronous=False): + '''Handle *event*. + + If *synchronous* is True, do not send any automatic reply events. + + ''' + # Sort by priority, lower is higher. + # TODO: Use a sorted list to avoid sorting each time in order to improve + # performance. + subscribers = sorted( + self._subscribers, key=operator.attrgetter('priority') + ) + + results = [] + + target = event.get('target', None) + target_expression = None + if target: + try: + target_expression = self._expression_parser.parse(target) + except Exception: + self.logger.exception(L( + 'Cannot handle event as failed to parse event target ' + 'information: {0}', event + )) + return + + for subscriber in subscribers: + # Check if event is targeted to the subscriber. + if ( + target_expression is not None + and not target_expression.match(subscriber.metadata) + ): + continue + + # Check if subscriber interested in the event. + if not subscriber.interested_in(event): + continue + + response = None + + try: + response = subscriber.callback(event) + results.append(response) + except Exception: + self.logger.exception(L( + 'Error calling subscriber {0} for event {1}.', + subscriber, event + )) + + # Automatically publish a non None response as a reply when not in + # synchronous mode. 
+            if not synchronous:
+                if self._deprecation_warning_auto_connect:
+                    warnings.warn(
+                        self._future_signature_warning, FutureWarning
+                    )
+
+                if response is not None:
+                    try:
+                        self.publish_reply(
+                            event, data=response, source=subscriber.metadata
+                        )
+
+                    except Exception:
+                        self.logger.exception(L(
+                            'Error publishing response {0} from subscriber {1} '
+                            'for event {2}.', response, subscriber, event
+                        ))
+
+            # Check whether to continue processing topic event.
+            if event.is_stopped():
+                self.logger.debug(L(
+                    'Subscriber {0} stopped event {1}. Will not process '
+                    'subsequent subscriber callbacks for this event.',
+                    subscriber, event
+                ))
+                break
+
+        return results
+
+    def _handle_reply(self, event):
+        '''Handle reply *event*, passing it to any registered callback.'''
+        callback = self._reply_callbacks.get(event['in_reply_to_event'], None)
+        if callback is not None:
+            callback(event)
+
+    def subscription(self, subscription, callback, subscriber=None,
+                     priority=100):
+        '''Return context manager with *callback* subscribed to *subscription*.
+
+        The subscribed callback will be automatically unsubscribed on exit
+        of the context manager.
+
+        '''
+        return _SubscriptionContext(
+            self, subscription, callback, subscriber=subscriber,
+            priority=priority,
+        )
+
+    # Socket.IO interface.
+    #
+
+    def _get_socket_io_session(self):
+        '''Connect to server and retrieve session information.'''
+        socket_io_url = (
+            '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}'
+        ).format(
+            self.server.scheme,
+            self.get_network_location(),
+            self._api_user,
+            self._api_key
+        )
+        try:
+            response = requests.get(
+                socket_io_url,
+                timeout=60  # 60 seconds timeout to receive errors faster.
+            )
+        except requests.exceptions.Timeout as error:
+            raise ftrack_api.exception.EventHubConnectionError(
+                'Timed out connecting to server: {0}.'.format(error)
+            )
+        except requests.exceptions.SSLError as error:
+            raise ftrack_api.exception.EventHubConnectionError(
+                'Failed to negotiate SSL with server: {0}.'.format(error)
+            )
+        except requests.exceptions.ConnectionError as error:
+            raise ftrack_api.exception.EventHubConnectionError(
+                'Failed to connect to server: {0}.'.format(error)
+            )
+        else:
+            status = response.status_code
+            if status != 200:
+                raise ftrack_api.exception.EventHubConnectionError(
+                    'Received unexpected status code {0}.'.format(status)
+                )
+
+            # Parse result and return session information.
+            parts = response.text.split(':')
+            return SocketIoSession(
+                parts[0],
+                parts[1],
+                parts[3].split(',')
+            )
+
+    def _add_packet_callback(self, callback):
+        '''Store callback against a new unique packet ID.
+
+        Return the unique packet ID.
+
+        '''
+        with self._lock:
+            self._unique_packet_id += 1
+            unique_identifier = self._unique_packet_id
+
+        self._packet_callbacks[unique_identifier] = callback
+
+        return '{0}+'.format(unique_identifier)
+
+    def _pop_packet_callback(self, packet_identifier):
+        '''Pop and return callback for *packet_identifier*.'''
+        return self._packet_callbacks.pop(packet_identifier)
+
+    def _emit_event_packet(self, namespace, event, callback):
+        '''Send *event* packet under *namespace*.'''
+        data = self._encode(
+            dict(name=namespace, args=[event])
+        )
+        self._send_packet(
+            self._code_name_mapping['event'], data=data, callback=callback
+        )
+
+    def _acknowledge_packet(self, packet_identifier, *args):
+        '''Send acknowledgement of packet with *packet_identifier*.'''
+        packet_identifier = packet_identifier.rstrip('+')
+        data = str(packet_identifier)
+        if args:
+            # NOTE(review): original used '+{1}', which raises IndexError for a
+            # single positional argument; index 0 is the encoded args payload.
+            data += '+{0}'.format(self._encode(args))
+
+        self._send_packet(self._code_name_mapping['acknowledge'], data=data)
+
+    def _send_packet(self, code, data='', callback=None):
+        
'''Send packet via connection.''' + path = '' + packet_identifier = ( + self._add_packet_callback(callback) if callback else '' + ) + packet_parts = (str(code), packet_identifier, path, data) + packet = ':'.join(packet_parts) + + try: + self._connection.send(packet) + self.logger.debug(L(u'Sent packet: {0}', packet)) + except socket.error as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to send packet: {0}'.format(error) + ) + + def _receive_packet(self): + '''Receive and return packet via connection.''' + try: + packet = self._connection.recv() + except Exception as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Error receiving packet: {0}'.format(error) + ) + + try: + parts = packet.split(':', 3) + except AttributeError: + raise ftrack_api.exception.EventHubPacketError( + 'Received invalid packet {0}'.format(packet) + ) + + code, packet_identifier, path, data = None, None, None, None + + count = len(parts) + if count == 4: + code, packet_identifier, path, data = parts + elif count == 3: + code, packet_identifier, path = parts + elif count == 1: + code = parts[0] + else: + raise ftrack_api.exception.EventHubPacketError( + 'Received invalid packet {0}'.format(packet) + ) + + self.logger.debug(L('Received packet: {0}', packet)) + return code, packet_identifier, path, data + + def _handle_packet(self, code, packet_identifier, path, data): + '''Handle packet received from server.''' + code_name = self._code_name_mapping[code] + + if code_name == 'connect': + self.logger.debug('Connected to event server.') + event = ftrack_api.event.base.Event('ftrack.meta.connected') + self._prepare_event(event) + self._event_queue.put(event) + + elif code_name == 'disconnect': + self.logger.debug('Disconnected from event server.') + if not self._intentional_disconnect: + self.logger.debug( + 'Disconnected unexpectedly. Attempting to reconnect.' 
+ ) + try: + self.reconnect( + attempts=self._auto_reconnect_attempts, + delay=self._auto_reconnect_delay + ) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug('Failed to reconnect automatically.') + else: + self.logger.debug('Reconnected successfully.') + + if not self.connected: + event = ftrack_api.event.base.Event('ftrack.meta.disconnected') + self._prepare_event(event) + self._event_queue.put(event) + + elif code_name == 'heartbeat': + # Reply with heartbeat. + self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == 'message': + self.logger.debug(L('Message received: {0}', data)) + + elif code_name == 'event': + payload = self._decode(data) + args = payload.get('args', []) + + if len(args) == 1: + event_payload = args[0] + if isinstance(event_payload, collections.Mapping): + try: + event = ftrack_api.event.base.Event(**event_payload) + except Exception: + self.logger.exception(L( + 'Failed to convert payload into event: {0}', + event_payload + )) + return + + self._event_queue.put(event) + + elif code_name == 'acknowledge': + parts = data.split('+', 1) + acknowledged_packet_identifier = int(parts[0]) + args = [] + if len(parts) == 2: + args = self._decode(parts[1]) + + try: + callback = self._pop_packet_callback( + acknowledged_packet_identifier + ) + except KeyError: + pass + else: + callback(*args) + + elif code_name == 'error': + self.logger.error(L('Event server reported error: {0}.', data)) + + else: + self.logger.debug(L('{0}: {1}', code_name, data)) + + def _encode(self, data): + '''Return *data* encoded as JSON formatted string.''' + return json.dumps( + data, + default=self._encode_object_hook, + ensure_ascii=False + ) + + def _encode_object_hook(self, item): + '''Return *item* transformed for encoding.''' + if isinstance(item, ftrack_api.event.base.Event): + # Convert to dictionary for encoding. + item = dict(**item) + + if 'in_reply_to_event' in item: + # Convert keys to server convention. 
+ item['inReplyToEvent'] = item.pop('in_reply_to_event') + + return item + + raise TypeError('{0!r} is not JSON serializable'.format(item)) + + def _decode(self, string): + '''Return decoded JSON *string* as Python object.''' + return json.loads(string, object_hook=self._decode_object_hook) + + def _decode_object_hook(self, item): + '''Return *item* transformed.''' + if isinstance(item, collections.Mapping): + if 'inReplyToEvent' in item: + item['in_reply_to_event'] = item.pop('inReplyToEvent') + + return item + + +class _SubscriptionContext(object): + '''Context manager for a one-off subscription.''' + + def __init__(self, hub, subscription, callback, subscriber, priority): + '''Initialise context.''' + self._hub = hub + self._subscription = subscription + self._callback = callback + self._subscriber = subscriber + self._priority = priority + self._subscriberIdentifier = None + + def __enter__(self): + '''Enter context subscribing callback to topic.''' + self._subscriberIdentifier = self._hub.subscribe( + self._subscription, self._callback, subscriber=self._subscriber, + priority=self._priority + ) + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit context unsubscribing callback from topic.''' + self._hub.unsubscribe(self._subscriberIdentifier) + + +class _ProcessorThread(threading.Thread): + '''Process messages from server.''' + + daemon = True + + def __init__(self, client): + '''Initialise thread with Socket.IO *client* instance.''' + super(_ProcessorThread, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + self.client = client + self.done = threading.Event() + + def run(self): + '''Perform work in thread.''' + while not self.done.is_set(): + try: + code, packet_identifier, path, data = self.client._receive_packet() + self.client._handle_packet(code, packet_identifier, path, data) + + except ftrack_api.exception.EventHubPacketError as error: + self.logger.debug(L('Ignoring invalid packet: {0}', error)) + continue + + except ftrack_api.exception.EventHubConnectionError: + self.cancel() + + # Fake a disconnection event in order to trigger reconnection + # when necessary. + self.client._handle_packet('0', '', '', '') + + break + + except Exception as error: + self.logger.debug(L('Aborting processor thread: {0}', error)) + self.cancel() + break + + def cancel(self): + '''Cancel work as soon as possible.''' + self.done.set() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py new file mode 100644 index 00000000000..0d38463aaf7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py @@ -0,0 +1,27 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.event.subscription + + +class Subscriber(object): + '''Represent event subscriber.''' + + def __init__(self, subscription, callback, metadata, priority): + '''Initialise subscriber.''' + self.subscription = ftrack_api.event.subscription.Subscription( + subscription + ) + self.callback = callback + self.metadata = metadata + self.priority = priority + + def __str__(self): + '''Return string representation.''' + return '<{0} metadata={1} subscription="{2}">'.format( + self.__class__.__name__, self.metadata, self.subscription + ) + + def interested_in(self, event): + '''Return whether subscriber interested in 
*event*.''' + return self.subscription.includes(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py new file mode 100644 index 00000000000..0b208d9977c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py @@ -0,0 +1,23 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.event.expression + + +class Subscription(object): + '''Represent a subscription.''' + + parser = ftrack_api.event.expression.Parser() + + def __init__(self, subscription): + '''Initialise with *subscription*.''' + self._subscription = subscription + self._expression = self.parser.parse(subscription) + + def __str__(self): + '''Return string representation.''' + return self._subscription + + def includes(self, event): + '''Return whether subscription includes *event*.''' + return self._expression.match(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py new file mode 100644 index 00000000000..8a2eb9bc041 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py @@ -0,0 +1,392 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import sys +import traceback + +import ftrack_api.entity.base + + +class Error(Exception): + '''ftrack specific error.''' + + default_message = 'Unspecified error occurred.' + + def __init__(self, message=None, details=None): + '''Initialise exception with *message*. + + If *message* is None, the class 'default_message' will be used. 
+ + *details* should be a mapping of extra information that can be used in + the message and also to provide more context. + + ''' + if message is None: + message = self.default_message + + self.message = message + self.details = details + if self.details is None: + self.details = {} + + self.traceback = traceback.format_exc() + + def __str__(self): + '''Return string representation.''' + keys = {} + for key, value in self.details.iteritems(): + if isinstance(value, unicode): + value = value.encode(sys.getfilesystemencoding()) + keys[key] = value + + return str(self.message.format(**keys)) + + +class AuthenticationError(Error): + '''Raise when an authentication error occurs.''' + + default_message = 'Authentication error.' + + +class ServerError(Error): + '''Raise when the server reports an error.''' + + default_message = 'Server reported error processing request.' + + +class ServerCompatibilityError(ServerError): + '''Raise when server appears incompatible.''' + + default_message = 'Server incompatible.' + + +class NotFoundError(Error): + '''Raise when something that should exist is not found.''' + + default_message = 'Not found.' + + +class NotUniqueError(Error): + '''Raise when unique value required and duplicate detected.''' + + default_message = 'Non-unique value detected.' + + +class IncorrectResultError(Error): + '''Raise when a result is incorrect.''' + + default_message = 'Incorrect result detected.' + + +class NoResultFoundError(IncorrectResultError): + '''Raise when a result was expected but no result was found.''' + + default_message = 'Expected result, but no result was found.' + + +class MultipleResultsFoundError(IncorrectResultError): + '''Raise when a single result expected, but multiple results found.''' + + default_message = 'Expected single result, but received multiple results.' + + +class EntityTypeError(Error): + '''Raise when an entity type error occurs.''' + + default_message = 'Entity type error.' 
+ + +class UnrecognisedEntityTypeError(EntityTypeError): + '''Raise when an unrecognised entity type detected.''' + + default_message = 'Entity type "{entity_type}" not recognised.' + + def __init__(self, entity_type, **kw): + '''Initialise with *entity_type* that is unrecognised.''' + kw.setdefault('details', {}).update(dict( + entity_type=entity_type + )) + super(UnrecognisedEntityTypeError, self).__init__(**kw) + + +class OperationError(Error): + '''Raise when an operation error occurs.''' + + default_message = 'Operation error.' + + +class InvalidStateError(Error): + '''Raise when an invalid state detected.''' + + default_message = 'Invalid state.' + + +class InvalidStateTransitionError(InvalidStateError): + '''Raise when an invalid state transition detected.''' + + default_message = ( + 'Invalid transition from {current_state!r} to {target_state!r} state ' + 'for entity {entity!r}' + ) + + def __init__(self, current_state, target_state, entity, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + current_state=current_state, + target_state=target_state, + entity=entity + )) + super(InvalidStateTransitionError, self).__init__(**kw) + + +class AttributeError(Error): + '''Raise when an error related to an attribute occurs.''' + + default_message = 'Attribute error.' + + +class ImmutableAttributeError(AttributeError): + '''Raise when modification of immutable attribute attempted.''' + + default_message = ( + 'Cannot modify value of immutable {attribute.name!r} attribute.' + ) + + def __init__(self, attribute, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + attribute=attribute + )) + super(ImmutableAttributeError, self).__init__(**kw) + + +class CollectionError(Error): + '''Raise when an error related to collections occurs.''' + + default_message = 'Collection error.' 
+ + def __init__(self, collection, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + collection=collection + )) + super(CollectionError, self).__init__(**kw) + + +class ImmutableCollectionError(CollectionError): + '''Raise when modification of immutable collection attempted.''' + + default_message = ( + 'Cannot modify value of immutable collection {collection!r}.' + ) + + +class DuplicateItemInCollectionError(CollectionError): + '''Raise when duplicate item in collection detected.''' + + default_message = ( + 'Item {item!r} already exists in collection {collection!r}.' + ) + + def __init__(self, item, collection, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + item=item + )) + super(DuplicateItemInCollectionError, self).__init__(collection, **kw) + + +class ParseError(Error): + '''Raise when a parsing error occurs.''' + + default_message = 'Failed to parse.' + + +class EventHubError(Error): + '''Raise when issues related to event hub occur.''' + + default_message = 'Event hub error occurred.' + + +class EventHubConnectionError(EventHubError): + '''Raise when event hub encounters connection problem.''' + + default_message = 'Event hub is not connected.' + + +class EventHubPacketError(EventHubError): + '''Raise when event hub encounters an issue with a packet.''' + + default_message = 'Invalid packet.' + + +class PermissionDeniedError(Error): + '''Raise when permission is denied.''' + + default_message = 'Permission denied.' + + +class LocationError(Error): + '''Base for errors associated with locations.''' + + default_message = 'Unspecified location error' + + +class ComponentNotInAnyLocationError(LocationError): + '''Raise when component not available in any location.''' + + default_message = 'Component not available in any location.' 
+ + +class ComponentNotInLocationError(LocationError): + '''Raise when component(s) not in location.''' + + default_message = ( + 'Component(s) {formatted_components} not found in location {location}.' + ) + + def __init__(self, components, location, **kw): + '''Initialise with *components* and *location*.''' + if isinstance(components, ftrack_api.entity.base.Entity): + components = [components] + + kw.setdefault('details', {}).update(dict( + components=components, + formatted_components=', '.join( + [str(component) for component in components] + ), + location=location + )) + + super(ComponentNotInLocationError, self).__init__(**kw) + + +class ComponentInLocationError(LocationError): + '''Raise when component(s) already exists in location.''' + + default_message = ( + 'Component(s) {formatted_components} already exist in location ' + '{location}.' + ) + + def __init__(self, components, location, **kw): + '''Initialise with *components* and *location*.''' + if isinstance(components, ftrack_api.entity.base.Entity): + components = [components] + + kw.setdefault('details', {}).update(dict( + components=components, + formatted_components=', '.join( + [str(component) for component in components] + ), + location=location + )) + + super(ComponentInLocationError, self).__init__(**kw) + + +class AccessorError(Error): + '''Base for errors associated with accessors.''' + + default_message = 'Unspecified accessor error' + + +class AccessorOperationFailedError(AccessorError): + '''Base for failed operations on accessors.''' + + default_message = 'Operation {operation} failed: {error}' + + def __init__( + self, operation='', resource_identifier=None, error=None, **kw + ): + kw.setdefault('details', {}).update(dict( + operation=operation, + resource_identifier=resource_identifier, + error=error + )) + super(AccessorOperationFailedError, self).__init__(**kw) + + +class AccessorUnsupportedOperationError(AccessorOperationFailedError): + '''Raise when operation is unsupported.''' + + 
default_message = 'Operation {operation} unsupported.' + + +class AccessorPermissionDeniedError(AccessorOperationFailedError): + '''Raise when permission denied.''' + + default_message = ( + 'Cannot {operation} {resource_identifier}. Permission denied.' + ) + + +class AccessorResourceIdentifierError(AccessorError): + '''Raise when a error related to a resource_identifier occurs.''' + + default_message = 'Resource identifier is invalid: {resource_identifier}.' + + def __init__(self, resource_identifier, **kw): + kw.setdefault('details', {}).update(dict( + resource_identifier=resource_identifier + )) + super(AccessorResourceIdentifierError, self).__init__(**kw) + + +class AccessorFilesystemPathError(AccessorResourceIdentifierError): + '''Raise when a error related to an accessor filesystem path occurs.''' + + default_message = ( + 'Could not determine filesystem path from resource identifier: ' + '{resource_identifier}.' + ) + + +class AccessorResourceError(AccessorError): + '''Base for errors associated with specific resource.''' + + default_message = 'Unspecified resource error: {resource_identifier}' + + def __init__(self, operation='', resource_identifier=None, error=None, + **kw): + kw.setdefault('details', {}).update(dict( + operation=operation, + resource_identifier=resource_identifier + )) + super(AccessorResourceError, self).__init__(**kw) + + +class AccessorResourceNotFoundError(AccessorResourceError): + '''Raise when a required resource is not found.''' + + default_message = 'Resource not found: {resource_identifier}' + + +class AccessorParentResourceNotFoundError(AccessorResourceError): + '''Raise when a parent resource (such as directory) is not found.''' + + default_message = 'Parent resource is missing: {resource_identifier}' + + +class AccessorResourceInvalidError(AccessorResourceError): + '''Raise when a resource is not the right type.''' + + default_message = 'Resource invalid: {resource_identifier}' + + +class 
AccessorContainerNotEmptyError(AccessorResourceError): + '''Raise when container is not empty.''' + + default_message = 'Container is not empty: {resource_identifier}' + + +class StructureError(Error): + '''Base for errors associated with structures.''' + + default_message = 'Unspecified structure error' + + +class ConnectionClosedError(Error): + '''Raise when attempt to use closed connection detected.''' + + default_message = "Connection closed." diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py new file mode 100644 index 00000000000..c282fcc8141 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py @@ -0,0 +1,131 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import termcolor + +import ftrack_api.entity.base +import ftrack_api.collection +import ftrack_api.symbol +import ftrack_api.inspection + + +#: Useful filters to pass to :func:`format`.` +FILTER = { + 'ignore_unset': ( + lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET + ) +} + + +def format( + entity, formatters=None, attribute_filter=None, recursive=False, + indent=0, indent_first_line=True, _seen=None +): + '''Return formatted string representing *entity*. + + *formatters* can be used to customise formatting of elements. It should be a + mapping with one or more of the following keys: + + * header - Used to format entity type. + * label - Used to format attribute names. + + Specify an *attribute_filter* to control which attributes to include. By + default all attributes are included. The *attribute_filter* should be a + callable that accepts `(entity, attribute_name, attribute_value)` and + returns True if the attribute should be included in the output. 
For example, + to filter out all unset values:: + + attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] + + If *recursive* is True then recurse into Collections and format each entity + present. + + *indent* specifies the overall indentation in spaces of the formatted text, + whilst *indent_first_line* determines whether to apply that indent to the + first generated line. + + .. warning:: + + Iterates over all *entity* attributes which may cause multiple queries + to the server. Turn off auto populating in the session to prevent this. + + ''' + # Initialise default formatters. + if formatters is None: + formatters = dict() + + formatters.setdefault( + 'header', lambda text: termcolor.colored( + text, 'white', 'on_blue', attrs=['bold'] + ) + ) + formatters.setdefault( + 'label', lambda text: termcolor.colored( + text, 'blue', attrs=['bold'] + ) + ) + + # Determine indents. + spacer = ' ' * indent + if indent_first_line: + first_line_spacer = spacer + else: + first_line_spacer = '' + + # Avoid infinite recursion on circular references. 
+ if _seen is None: + _seen = set() + + identifier = str(ftrack_api.inspection.identity(entity)) + if identifier in _seen: + return ( + first_line_spacer + + formatters['header'](entity.entity_type) + '{...}' + ) + + _seen.add(identifier) + information = list() + + information.append( + first_line_spacer + formatters['header'](entity.entity_type) + ) + for key, value in sorted(entity.items()): + if attribute_filter is not None: + if not attribute_filter(entity, key, value): + continue + + child_indent = indent + len(key) + 3 + + if isinstance(value, ftrack_api.entity.base.Entity): + value = format( + value, + formatters=formatters, + attribute_filter=attribute_filter, + recursive=recursive, + indent=child_indent, + indent_first_line=False, + _seen=_seen.copy() + ) + + if isinstance(value, ftrack_api.collection.Collection): + if recursive: + child_values = [] + for index, child in enumerate(value): + child_value = format( + child, + formatters=formatters, + attribute_filter=attribute_filter, + recursive=recursive, + indent=child_indent, + indent_first_line=index != 0, + _seen=_seen.copy() + ) + child_values.append(child_value) + + value = '\n'.join(child_values) + + information.append( + spacer + u' {0}: {1}'.format(formatters['label'](key), value) + ) + + return '\n'.join(information) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py new file mode 100644 index 00000000000..d8b815200ec --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py @@ -0,0 +1,135 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import collections + +import ftrack_api.symbol +import ftrack_api.operation + + +def identity(entity): + '''Return unique identity of *entity*.''' + return ( + str(entity.entity_type), + 
primary_key(entity).values() + ) + + +def primary_key(entity): + '''Return primary key of *entity* as an ordered mapping of {field: value}. + + To get just the primary key values:: + + primary_key(entity).values() + + ''' + primary_key = collections.OrderedDict() + for name in entity.primary_key_attributes: + value = entity[name] + if value is ftrack_api.symbol.NOT_SET: + raise KeyError( + 'Missing required value for primary key attribute "{0}" on ' + 'entity {1!r}.'.format(name, entity) + ) + + primary_key[str(name)] = str(value) + + return primary_key + + +def _state(operation, state): + '''Return state following *operation* against current *state*.''' + if ( + isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ) + and state is ftrack_api.symbol.NOT_SET + ): + state = ftrack_api.symbol.CREATED + + elif ( + isinstance( + operation, ftrack_api.operation.UpdateEntityOperation + ) + and state is ftrack_api.symbol.NOT_SET + ): + state = ftrack_api.symbol.MODIFIED + + elif isinstance( + operation, ftrack_api.operation.DeleteEntityOperation + ): + state = ftrack_api.symbol.DELETED + + return state + + +def state(entity): + '''Return current *entity* state. + + .. seealso:: :func:`ftrack_api.inspection.states`. + + ''' + value = ftrack_api.symbol.NOT_SET + + for operation in entity.session.recorded_operations: + # Determine if operation refers to an entity and whether that entity + # is *entity*. + if ( + isinstance( + operation, + ( + ftrack_api.operation.CreateEntityOperation, + ftrack_api.operation.UpdateEntityOperation, + ftrack_api.operation.DeleteEntityOperation + ) + ) + and operation.entity_type == entity.entity_type + and operation.entity_key == primary_key(entity) + ): + value = _state(operation, value) + + return value + + +def states(entities): + '''Return current states of *entities*. + + An optimised function for determining states of multiple entities in one + go. + + .. note:: + + All *entities* should belong to the same session. + + .. 
seealso:: :func:`ftrack_api.inspection.state`. + + ''' + if not entities: + return [] + + session = entities[0].session + + entities_by_identity = collections.OrderedDict() + for entity in entities: + key = (entity.entity_type, str(primary_key(entity).values())) + entities_by_identity[key] = ftrack_api.symbol.NOT_SET + + for operation in session.recorded_operations: + if ( + isinstance( + operation, + ( + ftrack_api.operation.CreateEntityOperation, + ftrack_api.operation.UpdateEntityOperation, + ftrack_api.operation.DeleteEntityOperation + ) + ) + ): + key = (operation.entity_type, str(operation.entity_key.values())) + if key not in entities_by_identity: + continue + + value = _state(operation, entities_by_identity[key]) + entities_by_identity[key] = value + + return entities_by_identity.values() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py new file mode 100644 index 00000000000..41969c5b2a8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py @@ -0,0 +1,43 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +import functools +import warnings + + +def deprecation_warning(message): + def decorator(function): + @functools.wraps(function) + def wrapper(*args, **kwargs): + warnings.warn( + message, + PendingDeprecationWarning + ) + return function(*args, **kwargs) + return wrapper + + return decorator + + +class LazyLogMessage(object): + '''A log message that can be evaluated lazily for improved performance. + + Example:: + + # Formatting of string will not occur unless debug logging enabled. 
+ logger.debug(LazyLogMessage( + 'Hello {0}', 'world' + )) + + ''' + + def __init__(self, message, *args, **kwargs): + '''Initialise with *message* format string and arguments.''' + self.message = message + self.args = args + self.kwargs = kwargs + + def __str__(self): + '''Return string representation.''' + return self.message.format(*self.args, **self.kwargs) + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py new file mode 100644 index 00000000000..bb3bb4ee2c9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py @@ -0,0 +1,115 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import copy + + +class Operations(object): + '''Stack of operations.''' + + def __init__(self): + '''Initialise stack.''' + self._stack = [] + super(Operations, self).__init__() + + def clear(self): + '''Clear all operations.''' + del self._stack[:] + + def push(self, operation): + '''Push *operation* onto stack.''' + self._stack.append(operation) + + def pop(self): + '''Pop and return most recent operation from stack.''' + return self._stack.pop() + + def __len__(self): + '''Return count of operations.''' + return len(self._stack) + + def __iter__(self): + '''Return iterator over operations.''' + return iter(self._stack) + + +class Operation(object): + '''Represent an operation.''' + + +class CreateEntityOperation(Operation): + '''Represent create entity operation.''' + + def __init__(self, entity_type, entity_key, entity_data): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. 
+ + *entity_data* should be a mapping of the initial data to populate the + entity with when creating. + + .. note:: + + Shallow copies will be made of each value in *entity_data*. + + ''' + super(CreateEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + self.entity_data = {} + for key, value in entity_data.items(): + self.entity_data[key] = copy.copy(value) + + +class UpdateEntityOperation(Operation): + '''Represent update entity operation.''' + + def __init__( + self, entity_type, entity_key, attribute_name, old_value, new_value + ): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. + + *attribute_name* should be the string name of the attribute being + modified and *old_value* and *new_value* should reflect the change in + value. + + .. note:: + + Shallow copies will be made of both *old_value* and *new_value*. + + ''' + super(UpdateEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + self.attribute_name = attribute_name + self.old_value = copy.copy(old_value) + self.new_value = copy.copy(new_value) + + +class DeleteEntityOperation(Operation): + '''Represent delete entity operation.''' + + def __init__(self, entity_type, entity_key): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. 
+ + ''' + super(DeleteEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py new file mode 100644 index 00000000000..2c7a9a45009 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py @@ -0,0 +1,121 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging +import os +import uuid +import imp +import inspect + + +def discover(paths, positional_arguments=None, keyword_arguments=None): + '''Find and load plugins in search *paths*. + + Each discovered module should implement a register function that accepts + *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs + respectively. + + If a register function does not accept variable arguments, then attempt to + only pass accepted arguments to the function by inspecting its signature. + + ''' + logger = logging.getLogger(__name__ + '.discover') + + if positional_arguments is None: + positional_arguments = [] + + if keyword_arguments is None: + keyword_arguments = {} + + for path in paths: + # Ignore empty paths that could resolve to current directory. 
+ path = path.strip() + if not path: + continue + + for base, directories, filenames in os.walk(path): + for filename in filenames: + name, extension = os.path.splitext(filename) + if extension != '.py': + continue + + module_path = os.path.join(base, filename) + unique_name = uuid.uuid4().hex + + try: + module = imp.load_source(unique_name, module_path) + except Exception as error: + logger.warning( + 'Failed to load plugin from "{0}": {1}' + .format(module_path, error) + ) + continue + + try: + module.register + except AttributeError: + logger.warning( + 'Failed to load plugin that did not define a ' + '"register" function at the module level: {0}' + .format(module_path) + ) + else: + # Attempt to only pass arguments that are accepted by the + # register function. + specification = inspect.getargspec(module.register) + + selected_positional_arguments = positional_arguments + selected_keyword_arguments = keyword_arguments + + if ( + not specification.varargs and + len(positional_arguments) > len(specification.args) + ): + logger.warning( + 'Culling passed arguments to match register ' + 'function signature.' + ) + + selected_positional_arguments = positional_arguments[ + len(specification.args): + ] + selected_keyword_arguments = {} + + elif not specification.keywords: + # Remove arguments that have been passed as positionals. + remainder = specification.args[ + len(positional_arguments): + ] + + # Determine remaining available keyword arguments. + defined_keyword_arguments = [] + if specification.defaults: + defined_keyword_arguments = specification.args[ + -len(specification.defaults): + ] + + remaining_keyword_arguments = set([ + keyword_argument for keyword_argument + in defined_keyword_arguments + if keyword_argument in remainder + ]) + + if not set(keyword_arguments.keys()).issubset( + remaining_keyword_arguments + ): + logger.warning( + 'Culling passed arguments to match register ' + 'function signature.' 
+ )
+ selected_keyword_arguments = {
+ key: value
+ for key, value in keyword_arguments.items()
+ if key in remaining_keyword_arguments
+ }
+
+ module.register(
+ *selected_positional_arguments,
+ **selected_keyword_arguments
+ )
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py
new file mode 100644
index 00000000000..ea101a29d4a
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py
@@ -0,0 +1,202 @@
+# :coding: utf-8
+# :copyright: Copyright (c) 2014 ftrack
+
+import re
+import collections
+
+import ftrack_api.exception
+
+
+class QueryResult(collections.Sequence):
+ '''Results from a query.'''
+
+ OFFSET_EXPRESSION = re.compile('(?P<offset>offset (?P<value>\d+))')
+ LIMIT_EXPRESSION = re.compile('(?P<limit>limit (?P<value>\d+))')
+
+ def __init__(self, session, expression, page_size=500):
+ '''Initialise result set.
+
+ *session* should be an instance of :class:`ftrack_api.session.Session`
+ that will be used for executing the query *expression*.
+
+ *page_size* should be an integer specifying the maximum number of
+ records to fetch in one request allowing the results to be fetched
+ incrementally in a transparent manner for optimal performance. Any
+ offset or limit specified in *expression* are honoured for final result
+ set, but intermediate queries may be issued with different offsets and
+ limits in order to fetch pages. When an embedded limit is smaller than
+ the given *page_size* it will be used instead and no paging will take
+ place.
+
+ .. warning::
+
+ Setting *page_size* to a very large amount may negatively impact
+ performance of not only the caller, but the server in general.
+ + ''' + super(QueryResult, self).__init__() + self._session = session + self._results = [] + + ( + self._expression, + self._offset, + self._limit + ) = self._extract_offset_and_limit(expression) + + self._page_size = page_size + if self._limit is not None and self._limit < self._page_size: + # Optimise case where embedded limit is less than fetching a + # single page. + self._page_size = self._limit + + self._next_offset = self._offset + if self._next_offset is None: + # Initialise with zero offset. + self._next_offset = 0 + + def _extract_offset_and_limit(self, expression): + '''Process *expression* extracting offset and limit. + + Return (expression, offset, limit). + + ''' + offset = None + match = self.OFFSET_EXPRESSION.search(expression) + if match: + offset = int(match.group('value')) + expression = ( + expression[:match.start('offset')] + + expression[match.end('offset'):] + ) + + limit = None + match = self.LIMIT_EXPRESSION.search(expression) + if match: + limit = int(match.group('value')) + expression = ( + expression[:match.start('limit')] + + expression[match.end('limit'):] + ) + + return expression.strip(), offset, limit + + def __getitem__(self, index): + '''Return value at *index*.''' + while self._can_fetch_more() and index >= len(self._results): + self._fetch_more() + + return self._results[index] + + def __len__(self): + '''Return number of items.''' + while self._can_fetch_more(): + self._fetch_more() + + return len(self._results) + + def _can_fetch_more(self): + '''Return whether more results are available to fetch.''' + return self._next_offset is not None + + def _fetch_more(self): + '''Fetch next page of results if available.''' + if not self._can_fetch_more(): + return + + expression = '{0} offset {1} limit {2}'.format( + self._expression, self._next_offset, self._page_size + ) + records, metadata = self._session._query(expression) + self._results.extend(records) + + if self._limit is not None and (len(self._results) >= self._limit): + # 
Original limit reached. + self._next_offset = None + del self._results[self._limit:] + else: + # Retrieve next page offset from returned metadata. + self._next_offset = metadata.get('next', {}).get('offset', None) + + def all(self): + '''Fetch and return all data.''' + return list(self) + + def one(self): + '''Return exactly one single result from query by applying a limit. + + Raise :exc:`ValueError` if an existing limit is already present in the + expression. + + Raise :exc:`ValueError` if an existing offset is already present in the + expression as offset is inappropriate when expecting a single item. + + Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more + than one result was available or + :exc:`~ftrack_api.exception.NoResultFoundError` if no results were + available. + + .. note:: + + Both errors subclass + :exc:`~ftrack_api.exception.IncorrectResultError` if you want to + catch only one error type. + + ''' + expression = self._expression + + if self._limit is not None: + raise ValueError( + 'Expression already contains a limit clause.' + ) + + if self._offset is not None: + raise ValueError( + 'Expression contains an offset clause which does not make ' + 'sense when selecting a single item.' + ) + + # Apply custom limit as optimisation. A limit of 2 is used rather than + # 1 so that it is possible to test for multiple matching entries + # case. + expression += ' limit 2' + + results, metadata = self._session._query(expression) + + if not results: + raise ftrack_api.exception.NoResultFoundError() + + if len(results) != 1: + raise ftrack_api.exception.MultipleResultsFoundError() + + return results[0] + + def first(self): + '''Return first matching result from query by applying a limit. + + Raise :exc:`ValueError` if an existing limit is already present in the + expression. + + If no matching result available return None. 
+ + ''' + expression = self._expression + + if self._limit is not None: + raise ValueError( + 'Expression already contains a limit clause.' + ) + + # Apply custom offset if present. + if self._offset is not None: + expression += ' offset {0}'.format(self._offset) + + # Apply custom limit as optimisation. + expression += ' limit 1' + + results, metadata = self._session._query(expression) + + if results: + return results[0] + + return None diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py new file mode 100644 index 00000000000..1aab07ed77a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py new file mode 100644 index 00000000000..ee069b57b68 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py @@ -0,0 +1,50 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + + +class ResourceIdentifierTransformer(object): + '''Transform resource identifiers. + + Provide ability to modify resource identifier before it is stored centrally + (:meth:`encode`), or after it has been retrieved, but before it is used + locally (:meth:`decode`). + + For example, you might want to decompose paths into a set of key, value + pairs to store centrally and then compose a path from those values when + reading back. 
+ + .. note:: + + This is separate from any transformations an + :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted + towards common transformations. + + ''' + + def __init__(self, session): + '''Initialise resource identifier transformer. + + *session* should be the :class:`ftrack_api.session.Session` instance + to use for communication with the server. + + ''' + self.session = session + super(ResourceIdentifierTransformer, self).__init__() + + def encode(self, resource_identifier, context=None): + '''Return encoded *resource_identifier* for storing centrally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return resource_identifier + + def decode(self, resource_identifier, context=None): + '''Return decoded *resource_identifier* for use locally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py new file mode 100644 index 00000000000..1a5da444324 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py @@ -0,0 +1,2515 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import json +import logging +import collections +import datetime +import os +import getpass +import functools +import itertools +import distutils.version +import hashlib +import tempfile +import threading +import atexit +import warnings + +import requests +import requests.auth +import arrow +import clique + +import ftrack_api +import ftrack_api.exception +import ftrack_api.entity.factory +import ftrack_api.entity.base +import ftrack_api.entity.location +import ftrack_api.cache +import ftrack_api.symbol +import ftrack_api.query +import 
ftrack_api.attribute +import ftrack_api.collection +import ftrack_api.event.hub +import ftrack_api.event.base +import ftrack_api.plugin +import ftrack_api.inspection +import ftrack_api.operation +import ftrack_api.accessor.disk +import ftrack_api.structure.origin +import ftrack_api.structure.entity_id +import ftrack_api.accessor.server +import ftrack_api._centralized_storage_scenario +import ftrack_api.logging +from ftrack_api.logging import LazyLogMessage as L + +try: + from weakref import WeakMethod +except ImportError: + from ftrack_api._weakref import WeakMethod + + +class SessionAuthentication(requests.auth.AuthBase): + '''Attach ftrack session authentication information to requests.''' + + def __init__(self, api_key, api_user): + '''Initialise with *api_key* and *api_user*.''' + self.api_key = api_key + self.api_user = api_user + super(SessionAuthentication, self).__init__() + + def __call__(self, request): + '''Modify *request* to have appropriate headers.''' + request.headers.update({ + 'ftrack-api-key': self.api_key, + 'ftrack-user': self.api_user + }) + return request + + +class Session(object): + '''An isolated session for interaction with an ftrack server.''' + + def __init__( + self, server_url=None, api_key=None, api_user=None, auto_populate=True, + plugin_paths=None, cache=None, cache_key_maker=None, + auto_connect_event_hub=None, schema_cache_path=None, + plugin_arguments=None + ): + '''Initialise session. + + *server_url* should be the URL of the ftrack server to connect to + including any port number. If not specified attempt to look up from + :envvar:`FTRACK_SERVER`. + + *api_key* should be the API key to use for authentication whilst + *api_user* should be the username of the user in ftrack to record + operations against. If not specified, *api_key* should be retrieved + from :envvar:`FTRACK_API_KEY` and *api_user* from + :envvar:`FTRACK_API_USER`. 
+ + If *auto_populate* is True (the default), then accessing entity + attributes will cause them to be automatically fetched from the server + if they are not already. This flag can be changed on the session + directly at any time. + + *plugin_paths* should be a list of paths to search for plugins. If not + specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. + + *cache* should be an instance of a cache that fulfils the + :class:`ftrack_api.cache.Cache` interface and will be used as the cache + for the session. It can also be a callable that will be called with the + session instance as sole argument. The callable should return ``None`` + if a suitable cache could not be configured, but session instantiation + can continue safely. + + .. note:: + + The session will add the specified cache to a pre-configured layered + cache that specifies the top level cache as a + :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary + to construct a separate memory cache for typical behaviour. Working + around this behaviour or removing the memory cache can lead to + unexpected behaviour. + + *cache_key_maker* should be an instance of a key maker that fulfils the + :class:`ftrack_api.cache.KeyMaker` interface and will be used to + generate keys for objects being stored in the *cache*. If not specified, + a :class:`~ftrack_api.cache.StringKeyMaker` will be used. + + If *auto_connect_event_hub* is True then embedded event hub will be + automatically connected to the event server and allow for publishing and + subscribing to **non-local** events. If False, then only publishing and + subscribing to **local** events will be possible until the hub is + manually connected using :meth:`EventHub.connect + `. + + .. note:: + + The event hub connection is performed in a background thread to + improve session startup time. If a registered plugin requires a + connected event hub then it should check the event hub connection + status explicitly. 
Subscribing to events does *not* require a + connected event hub. + + Enable schema caching by setting *schema_cache_path* to a folder path. + If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to + determine the path to store cache in. If the environment variable is + also not specified then a temporary directory will be used. Set to + `False` to disable schema caching entirely. + + *plugin_arguments* should be an optional mapping (dict) of keyword + arguments to pass to plugin register functions upon discovery. If a + discovered plugin has a signature that is incompatible with the passed + arguments, the discovery mechanism will attempt to reduce the passed + arguments to only those that the plugin accepts. Note that a warning + will be logged in this case. + + ''' + super(Session, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self._closed = False + + if server_url is None: + server_url = os.environ.get('FTRACK_SERVER') + + if not server_url: + raise TypeError( + 'Required "server_url" not specified. Pass as argument or set ' + 'in environment variable FTRACK_SERVER.' + ) + + self._server_url = server_url + + if api_key is None: + api_key = os.environ.get( + 'FTRACK_API_KEY', + # Backwards compatibility + os.environ.get('FTRACK_APIKEY') + ) + + if not api_key: + raise TypeError( + 'Required "api_key" not specified. Pass as argument or set in ' + 'environment variable FTRACK_API_KEY.' + ) + + self._api_key = api_key + + if api_user is None: + api_user = os.environ.get('FTRACK_API_USER') + if not api_user: + try: + api_user = getpass.getuser() + except Exception: + pass + + if not api_user: + raise TypeError( + 'Required "api_user" not specified. Pass as argument, set in ' + 'environment variable FTRACK_API_USER or one of the standard ' + 'environment variables used by Python\'s getpass module.' + ) + + self._api_user = api_user + + # Currently pending operations. 
+ self.recorded_operations = ftrack_api.operation.Operations() + self.record_operations = True + + self.cache_key_maker = cache_key_maker + if self.cache_key_maker is None: + self.cache_key_maker = ftrack_api.cache.StringKeyMaker() + + # Enforce always having a memory cache at top level so that the same + # in-memory instance is returned from session. + self.cache = ftrack_api.cache.LayeredCache([ + ftrack_api.cache.MemoryCache() + ]) + + if cache is not None: + if callable(cache): + cache = cache(self) + + if cache is not None: + self.cache.caches.append(cache) + + self._managed_request = None + self._request = requests.Session() + self._request.auth = SessionAuthentication( + self._api_key, self._api_user + ) + + self.auto_populate = auto_populate + + # Fetch server information and in doing so also check credentials. + self._server_information = self._fetch_server_information() + + # Now check compatibility of server based on retrieved information. + self.check_server_compatibility() + + # Construct event hub and load plugins. + self._event_hub = ftrack_api.event.hub.EventHub( + self._server_url, + self._api_user, + self._api_key, + ) + + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): + # Connect to event hub in background thread so as not to block main + # session usage waiting for event hub connection. + self._auto_connect_event_hub_thread = threading.Thread( + target=self._event_hub.connect + ) + self._auto_connect_event_hub_thread.daemon = True + self._auto_connect_event_hub_thread.start() + + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + + # Register to auto-close session on exit. 
+ atexit.register(WeakMethod(self.close)) + + self._plugin_paths = plugin_paths + if self._plugin_paths is None: + self._plugin_paths = os.environ.get( + 'FTRACK_EVENT_PLUGIN_PATH', '' + ).split(os.pathsep) + + self._discover_plugins(plugin_arguments=plugin_arguments) + + # TODO: Make schemas read-only and non-mutable (or at least without + # rebuilding types)? + if schema_cache_path is not False: + if schema_cache_path is None: + schema_cache_path = os.environ.get( + 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + ) + + schema_cache_path = os.path.join( + schema_cache_path, 'ftrack_api_schema_cache.json' + ) + + self.schemas = self._load_schemas(schema_cache_path) + self.types = self._build_entity_type_classes(self.schemas) + + ftrack_api._centralized_storage_scenario.register(self) + + self._configure_locations() + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) + + def __enter__(self): + '''Return session as context manager.''' + return self + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit session context, closing session in process.''' + self.close() + + @property + def _request(self): + '''Return request session. + + Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has + been closed and connection unavailable. 
+ + ''' + if self._managed_request is None: + raise ftrack_api.exception.ConnectionClosedError() + + return self._managed_request + + @_request.setter + def _request(self, value): + '''Set request session to *value*.''' + self._managed_request = value + + @property + def closed(self): + '''Return whether session has been closed.''' + return self._closed + + @property + def server_information(self): + '''Return server information such as server version.''' + return self._server_information.copy() + + @property + def server_url(self): + '''Return server ulr used for session.''' + return self._server_url + + @property + def api_user(self): + '''Return username used for session.''' + return self._api_user + + @property + def api_key(self): + '''Return API key used for session.''' + return self._api_key + + @property + def event_hub(self): + '''Return event hub.''' + return self._event_hub + + @property + def _local_cache(self): + '''Return top level memory cache.''' + return self.cache.caches[0] + + def check_server_compatibility(self): + '''Check compatibility with connected server.''' + server_version = self.server_information.get('version') + if server_version is None: + raise ftrack_api.exception.ServerCompatibilityError( + 'Could not determine server version.' + ) + + # Perform basic version check. + if server_version != 'dev': + min_server_version = '3.3.11' + if ( + distutils.version.LooseVersion(min_server_version) + > distutils.version.LooseVersion(server_version) + ): + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0} incompatible with this version of the ' + 'API which requires a server version >= {1}'.format( + server_version, + min_server_version + ) + ) + + def close(self): + '''Close session. + + Close connections to server. Clear any pending operations and local + cache. + + Use this to ensure that session is cleaned up properly after use. 
+ + ''' + if self.closed: + self.logger.debug('Session already closed.') + return + + self._closed = True + + self.logger.debug('Closing session.') + if self.recorded_operations: + self.logger.warning( + 'Closing session with pending operations not persisted.' + ) + + # Clear pending operations. + self.recorded_operations.clear() + + # Clear top level cache (expected to be enforced memory cache). + self._local_cache.clear() + + # Close connections. + self._request.close() + self._request = None + + try: + self.event_hub.disconnect() + if self._auto_connect_event_hub_thread: + self._auto_connect_event_hub_thread.join() + except ftrack_api.exception.EventHubConnectionError: + pass + + self.logger.debug('Session closed.') + + def reset(self): + '''Reset session clearing local state. + + Clear all pending operations and expunge all entities from session. + + Also clear the local cache. If the cache used by the session is a + :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. + Otherwise, clear the entire cache. + + Plugins are not rediscovered or reinitialised, but certain plugin events + are re-emitted to properly configure session aspects that are dependant + on cache (such as location plugins). + + .. warning:: + + Previously attached entities are not reset in memory and will retain + their state, but should not be used. Doing so will cause errors. + + ''' + if self.recorded_operations: + self.logger.warning( + 'Resetting session with pending operations not persisted.' + ) + + # Clear pending operations. + self.recorded_operations.clear() + + # Clear top level cache (expected to be enforced memory cache). + self._local_cache.clear() + + # Re-configure certain session aspects that may be dependant on cache. 
+ self._configure_locations() + + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.reset', + data=dict( + session=self + ) + ), + synchronous=True + ) + + def auto_populating(self, auto_populate): + '''Temporarily set auto populate to *auto_populate*. + + The current setting will be restored automatically when done. + + Example:: + + with session.auto_populating(False): + print entity['name'] + + ''' + return AutoPopulatingContext(self, auto_populate) + + def operation_recording(self, record_operations): + '''Temporarily set operation recording to *record_operations*. + + The current setting will be restored automatically when done. + + Example:: + + with session.operation_recording(False): + entity['name'] = 'change_not_recorded' + + ''' + return OperationRecordingContext(self, record_operations) + + @property + def created(self): + '''Return list of newly created entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.CREATED + ] + + @property + def modified(self): + '''Return list of locally modified entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.MODIFIED + ] + + @property + def deleted(self): + '''Return list of deleted entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.DELETED + ] + + def reset_remote(self, reset_type, entity=None): + '''Perform a server side reset. + + *reset_type* is a server side supported reset type, + passing the optional *entity* to perform the option upon. 
+ + Please refer to ftrack documentation for a complete list of + supported server side reset types. + ''' + + payload = { + 'action': 'reset_remote', + 'reset_type': reset_type + } + + if entity is not None: + payload.update({ + 'entity_type': entity.entity_type, + 'entity_key': entity.get('id') + }) + + result = self.call( + [payload] + ) + + return result[0]['data'] + + def create(self, entity_type, data=None, reconstructing=False): + '''Create and return an entity of *entity_type* with initial *data*. + + If specified, *data* should be a dictionary of key, value pairs that + should be used to populate attributes on the entity. + + If *reconstructing* is False then create a new entity setting + appropriate defaults for missing data. If True then reconstruct an + existing entity. + + Constructed entity will be automatically :meth:`merged ` + into the session. + + ''' + entity = self._create(entity_type, data, reconstructing=reconstructing) + entity = self.merge(entity) + return entity + + def _create(self, entity_type, data, reconstructing): + '''Create and return an entity of *entity_type* with initial *data*.''' + try: + EntityTypeClass = self.types[entity_type] + except KeyError: + raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) + + return EntityTypeClass(self, data=data, reconstructing=reconstructing) + + def ensure(self, entity_type, data, identifying_keys=None): + '''Retrieve entity of *entity_type* with *data*, creating if necessary. + + *data* should be a dictionary of the same form passed to :meth:`create`. + + By default, check for an entity that has matching *data*. If + *identifying_keys* is specified as a list of keys then only consider the + values from *data* for those keys when searching for existing entity. If + *data* is missing an identifying key then raise :exc:`KeyError`. + + If no *identifying_keys* specified then use all of the keys from the + passed *data*. 
Raise :exc:`ValueError` if no *identifying_keys* can be + determined. + + Each key should be a string. + + .. note:: + + Currently only top level scalars supported. To ensure an entity by + looking at relationships, manually issue the :meth:`query` and + :meth:`create` calls. + + If more than one entity matches the determined filter criteria then + raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. + + If no matching entity found then create entity using supplied *data*. + + If a matching entity is found, then update it if necessary with *data*. + + .. note:: + + If entity created or updated then a :meth:`commit` will be issued + automatically. If this behaviour is undesired, perform the + :meth:`query` and :meth:`create` calls manually. + + Return retrieved or created entity. + + Example:: + + # First time, a new entity with `username=martin` is created. + entity = session.ensure('User', {'username': 'martin'}) + + # After that, the existing entity is retrieved. + entity = session.ensure('User', {'username': 'martin'}) + + # When existing entity retrieved, entity may also be updated to + # match supplied data. + entity = session.ensure( + 'User', {'username': 'martin', 'email': 'martin@example.com'} + ) + + ''' + if not identifying_keys: + identifying_keys = data.keys() + + self.logger.debug(L( + 'Ensuring entity {0!r} with data {1!r} using identifying keys ' + '{2!r}', entity_type, data, identifying_keys + )) + + if not identifying_keys: + raise ValueError( + 'Could not determine any identifying data to check against ' + 'when ensuring {0!r} with data {1!r}. 
Identifying keys: {2!r}' + .format(entity_type, data, identifying_keys) + ) + + expression = '{0} where'.format(entity_type) + criteria = [] + for identifying_key in identifying_keys: + value = data[identifying_key] + + if isinstance(value, basestring): + value = '"{0}"'.format(value) + + elif isinstance( + value, (arrow.Arrow, datetime.datetime, datetime.date) + ): + # Server does not store microsecond or timezone currently so + # need to strip from query. + # TODO: When datetime handling improved, update this logic. + value = ( + arrow.get(value).naive.replace(microsecond=0).isoformat() + ) + value = '"{0}"'.format(value) + + criteria.append('{0} is {1}'.format(identifying_key, value)) + + expression = '{0} {1}'.format( + expression, ' and '.join(criteria) + ) + + try: + entity = self.query(expression).one() + + except ftrack_api.exception.NoResultFoundError: + self.logger.debug('Creating entity as did not already exist.') + + # Create entity. + entity = self.create(entity_type, data) + self.commit() + + else: + self.logger.debug('Retrieved matching existing entity.') + + # Update entity if required. + updated = False + for key, target_value in data.items(): + if entity[key] != target_value: + entity[key] = target_value + updated = True + + if updated: + self.logger.debug('Updating existing entity to match new data.') + self.commit() + + return entity + + def delete(self, entity): + '''Mark *entity* for deletion.''' + if self.record_operations: + self.recorded_operations.push( + ftrack_api.operation.DeleteEntityOperation( + entity.entity_type, + ftrack_api.inspection.primary_key(entity) + ) + ) + + def get(self, entity_type, entity_key): + '''Return entity of *entity_type* with unique *entity_key*. + + First check for an existing entry in the configured cache, otherwise + issue a query to the server. + + If no matching entity found, return None. 
+ + ''' + self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) + + primary_key_definition = self.types[entity_type].primary_key_attributes + if isinstance(entity_key, basestring): + entity_key = [entity_key] + + if len(entity_key) != len(primary_key_definition): + raise ValueError( + 'Incompatible entity_key {0!r} supplied. Entity type {1} ' + 'expects a primary key composed of {2} values ({3}).' + .format( + entity_key, entity_type, len(primary_key_definition), + ', '.join(primary_key_definition) + ) + ) + + entity = None + try: + entity = self._get(entity_type, entity_key) + + + except KeyError: + + # Query for matching entity. + self.logger.debug( + 'Entity not present in cache. Issuing new query.' + ) + condition = [] + for key, value in zip(primary_key_definition, entity_key): + condition.append('{0} is "{1}"'.format(key, value)) + + expression = '{0} where ({1})'.format( + entity_type, ' and '.join(condition) + ) + + results = self.query(expression).all() + if results: + entity = results[0] + + return entity + + def _get(self, entity_type, entity_key): + '''Return cached entity of *entity_type* with unique *entity_key*. + + Raise :exc:`KeyError` if no such entity in the cache. + + ''' + # Check cache for existing entity emulating + # ftrack_api.inspection.identity result object to pass to key maker. + cache_key = self.cache_key_maker.key( + (str(entity_type), map(str, entity_key)) + ) + self.logger.debug(L( + 'Checking cache for entity with key {0}', cache_key + )) + entity = self.cache.get(cache_key) + self.logger.debug(L( + 'Retrieved existing entity from cache: {0} at {1}', + entity, id(entity) + )) + + return entity + + def query(self, expression, page_size=500): + '''Query against remote data according to *expression*. + + *expression* is not executed directly. Instead return an + :class:`ftrack_api.query.QueryResult` instance that will execute remote + call on access. 
+ + *page_size* specifies the maximum page size that the returned query + result object should be configured with. + + .. seealso:: :ref:`querying` + + ''' + self.logger.debug(L('Query {0!r}', expression)) + + # Add in sensible projections if none specified. Note that this is + # done here rather than on the server to allow local modification of the + # schema setting to include commonly used custom attributes for example. + # TODO: Use a proper parser perhaps? + if not expression.startswith('select'): + entity_type = expression.split(' ', 1)[0] + EntityTypeClass = self.types[entity_type] + projections = EntityTypeClass.default_projections + + expression = 'select {0} from {1}'.format( + ', '.join(projections), + expression + ) + + query_result = ftrack_api.query.QueryResult( + self, expression, page_size=page_size + ) + return query_result + + def _query(self, expression): + '''Execute *query* and return (records, metadata). + + Records will be a list of entities retrieved via the query and metadata + a dictionary of accompanying information about the result set. + + ''' + # TODO: Actually support batching several queries together. + # TODO: Should batches have unique ids to match them up later. + batch = [{ + 'action': 'query', + 'expression': expression + }] + + # TODO: When should this execute? How to handle background=True? + results = self.call(batch) + + # Merge entities into local cache and return merged entities. + data = [] + merged = dict() + for entity in results[0]['data']: + data.append(self._merge_recursive(entity, merged)) + + return data, results[0]['metadata'] + + def merge(self, value, merged=None): + '''Merge *value* into session and return merged value. + + *merged* should be a mapping to record merges during run and should be + used to avoid infinite recursion. If not set will default to a + dictionary. 
+ + ''' + if merged is None: + merged = {} + + with self.operation_recording(False): + return self._merge(value, merged) + + def _merge(self, value, merged): + '''Return merged *value*.''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if isinstance(value, ftrack_api.entity.base.Entity): + log_debug and self.logger.debug( + 'Merging entity into session: {0} at {1}' + .format(value, id(value)) + ) + + return self._merge_entity(value, merged=merged) + + elif isinstance(value, ftrack_api.collection.Collection): + log_debug and self.logger.debug( + 'Merging collection into session: {0!r} at {1}' + .format(value, id(value)) + ) + + merged_collection = [] + for entry in value: + merged_collection.append( + self._merge(entry, merged=merged) + ) + + return merged_collection + + elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): + log_debug and self.logger.debug( + 'Merging mapped collection into session: {0!r} at {1}' + .format(value, id(value)) + ) + + merged_collection = [] + for entry in value.collection: + merged_collection.append( + self._merge(entry, merged=merged) + ) + + return merged_collection + + else: + return value + + def _merge_recursive(self, entity, merged=None): + '''Merge *entity* and all its attributes recursivly.''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + attached = self.merge(entity, merged) + + for attribute in entity.attributes: + # Remote attributes. 
+ remote_value = attribute.get_remote_value(entity) + + if isinstance( + remote_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + log_debug and self.logger.debug( + 'Merging remote value for attribute {0}.'.format(attribute) + ) + + if isinstance(remote_value, ftrack_api.entity.base.Entity): + self._merge_recursive(remote_value, merged=merged) + + elif isinstance( + remote_value, ftrack_api.collection.Collection + ): + for entry in remote_value: + self._merge_recursive(entry, merged=merged) + + elif isinstance( + remote_value, ftrack_api.collection.MappedCollectionProxy + ): + for entry in remote_value.collection: + self._merge_recursive(entry, merged=merged) + + return attached + + def _merge_entity(self, entity, merged=None): + '''Merge *entity* into session returning merged entity. + + Merge is recursive so any references to other entities will also be + merged. + + *entity* will never be modified in place. Ensure that the returned + merged entity instance is used. + + ''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + with self.auto_populating(False): + entity_key = self.cache_key_maker.key( + ftrack_api.inspection.identity(entity) + ) + + # Check whether this entity has already been processed. + attached_entity = merged.get(entity_key) + if attached_entity is not None: + log_debug and self.logger.debug( + 'Entity already processed for key {0} as {1} at {2}' + .format(entity_key, attached_entity, id(attached_entity)) + ) + + return attached_entity + else: + log_debug and self.logger.debug( + 'Entity not already processed for key {0}.' + .format(entity_key) + ) + + # Check for existing instance of entity in cache. 
+ log_debug and self.logger.debug( + 'Checking for entity in cache with key {0}'.format(entity_key) + ) + try: + attached_entity = self.cache.get(entity_key) + + log_debug and self.logger.debug( + 'Retrieved existing entity from cache: {0} at {1}' + .format(attached_entity, id(attached_entity)) + ) + + except KeyError: + # Construct new minimal instance to store in cache. + attached_entity = self._create( + entity.entity_type, {}, reconstructing=True + ) + + log_debug and self.logger.debug( + 'Entity not present in cache. Constructed new instance: ' + '{0} at {1}'.format(attached_entity, id(attached_entity)) + ) + + # Mark entity as seen to avoid infinite loops. + merged[entity_key] = attached_entity + + changes = attached_entity.merge(entity, merged=merged) + if changes: + self.cache.set(entity_key, attached_entity) + self.logger.debug('Cache updated with merged entity.') + + else: + self.logger.debug( + 'Cache not updated with merged entity as no differences ' + 'detected.' + ) + + return attached_entity + + def populate(self, entities, projections): + '''Populate *entities* with attributes specified by *projections*. + + Any locally set values included in the *projections* will not be + overwritten with the retrieved remote value. If this 'synchronise' + behaviour is required, first clear the relevant values on the entity by + setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will + have the same effect:: + + >>> print(user['username']) + martin + >>> del user['username'] + >>> print(user['username']) + Symbol(NOT_SET) + + .. note:: + + Entities that have been created and not yet persisted will be + skipped as they have no remote values to fetch. 
+ + ''' + self.logger.debug(L( + 'Populate {0!r} projections for {1}.', projections, entities + )) + + if not isinstance( + entities, (list, tuple, ftrack_api.query.QueryResult) + ): + entities = [entities] + + # TODO: How to handle a mixed collection of different entity types + # Should probably fail, but need to consider handling hierarchies such + # as User and Group both deriving from Resource. Actually, could just + # proceed and ignore projections that are not present in entity type. + + entities_to_process = [] + + for entity in entities: + if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: + # Created entities that are not yet persisted have no remote + # values. Don't raise an error here as it is reasonable to + # iterate over an entities properties and see that some of them + # are NOT_SET. + self.logger.debug(L( + 'Skipping newly created entity {0!r} for population as no ' + 'data will exist in the remote for this entity yet.', entity + )) + continue + + entities_to_process.append(entity) + + if entities_to_process: + reference_entity = entities_to_process[0] + entity_type = reference_entity.entity_type + query = 'select {0} from {1}'.format(projections, entity_type) + + primary_key_definition = reference_entity.primary_key_attributes + entity_keys = [ + ftrack_api.inspection.primary_key(entity).values() + for entity in entities_to_process + ] + + if len(primary_key_definition) > 1: + # Composite keys require full OR syntax unfortunately. 
+ conditions = [] + for entity_key in entity_keys: + condition = [] + for key, value in zip(primary_key_definition, entity_key): + condition.append('{0} is "{1}"'.format(key, value)) + + conditions.append('({0})'.format('and '.join(condition))) + + query = '{0} where {1}'.format(query, ' or '.join(conditions)) + + else: + primary_key = primary_key_definition[0] + + if len(entity_keys) > 1: + query = '{0} where {1} in ({2})'.format( + query, primary_key, + ','.join([ + str(entity_key[0]) for entity_key in entity_keys + ]) + ) + else: + query = '{0} where {1} is {2}'.format( + query, primary_key, str(entity_keys[0][0]) + ) + + result = self.query(query) + + # Fetch all results now. Doing so will cause them to populate the + # relevant entities in the cache. + result.all() + + # TODO: Should we check that all requested attributes were + # actually populated? If some weren't would we mark that to avoid + # repeated calls or perhaps raise an error? + + # TODO: Make atomic. + def commit(self): + '''Commit all local changes to the server.''' + batch = [] + + with self.auto_populating(False): + for operation in self.recorded_operations: + + # Convert operation to payload. + if isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ): + # At present, data payload requires duplicating entity + # type in data and also ensuring primary key added. + entity_data = { + '__entity_type__': operation.entity_type, + } + entity_data.update(operation.entity_key) + entity_data.update(operation.entity_data) + + payload = OperationPayload({ + 'action': 'create', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values(), + 'entity_data': entity_data + }) + + elif isinstance( + operation, ftrack_api.operation.UpdateEntityOperation + ): + entity_data = { + # At present, data payload requires duplicating entity + # type. 
+ '__entity_type__': operation.entity_type, + operation.attribute_name: operation.new_value + } + + payload = OperationPayload({ + 'action': 'update', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values(), + 'entity_data': entity_data + }) + + elif isinstance( + operation, ftrack_api.operation.DeleteEntityOperation + ): + payload = OperationPayload({ + 'action': 'delete', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values() + }) + + else: + raise ValueError( + 'Cannot commit. Unrecognised operation type {0} ' + 'detected.'.format(type(operation)) + ) + + batch.append(payload) + + # Optimise batch. + # TODO: Might be better to perform these on the operations list instead + # so all operation contextual information available. + + # If entity was created and deleted in one batch then remove all + # payloads for that entity. + created = set() + deleted = set() + + for payload in batch: + if payload['action'] == 'create': + created.add( + (payload['entity_type'], str(payload['entity_key'])) + ) + + elif payload['action'] == 'delete': + deleted.add( + (payload['entity_type'], str(payload['entity_key'])) + ) + + created_then_deleted = deleted.intersection(created) + if created_then_deleted: + optimised_batch = [] + for payload in batch: + entity_type = payload.get('entity_type') + entity_key = str(payload.get('entity_key')) + + if (entity_type, entity_key) in created_then_deleted: + continue + + optimised_batch.append(payload) + + batch = optimised_batch + + # Remove early update operations so that only last operation on + # attribute is applied server side. 
+ updates_map = set() + for payload in reversed(batch): + if payload['action'] in ('update', ): + for key, value in payload['entity_data'].items(): + if key == '__entity_type__': + continue + + identity = ( + payload['entity_type'], str(payload['entity_key']), key + ) + if identity in updates_map: + del payload['entity_data'][key] + else: + updates_map.add(identity) + + # Remove NOT_SET values from entity_data. + for payload in batch: + entity_data = payload.get('entity_data', {}) + for key, value in entity_data.items(): + if value is ftrack_api.symbol.NOT_SET: + del entity_data[key] + + # Remove payloads with redundant entity_data. + optimised_batch = [] + for payload in batch: + entity_data = payload.get('entity_data') + if entity_data is not None: + keys = entity_data.keys() + if not keys or keys == ['__entity_type__']: + continue + + optimised_batch.append(payload) + + batch = optimised_batch + + # Collapse updates that are consecutive into one payload. Also, collapse + # updates that occur immediately after creation into the create payload. + optimised_batch = [] + previous_payload = None + + for payload in batch: + if ( + previous_payload is not None + and payload['action'] == 'update' + and previous_payload['action'] in ('create', 'update') + and previous_payload['entity_type'] == payload['entity_type'] + and previous_payload['entity_key'] == payload['entity_key'] + ): + previous_payload['entity_data'].update(payload['entity_data']) + continue + + else: + optimised_batch.append(payload) + previous_payload = payload + + batch = optimised_batch + + # Process batch. + if batch: + result = self.call(batch) + + # Clear recorded operations. + self.recorded_operations.clear() + + # As optimisation, clear local values which are not primary keys to + # avoid redundant merges when merging references. Note: primary keys + # remain as needed for cache retrieval on new entities. 
+ with self.auto_populating(False): + with self.operation_recording(False): + for entity in self._local_cache.values(): + for attribute in entity: + if attribute not in entity.primary_key_attributes: + del entity[attribute] + + # Process results merging into cache relevant data. + for entry in result: + + if entry['action'] in ('create', 'update'): + # Merge returned entities into local cache. + self.merge(entry['data']) + + elif entry['action'] == 'delete': + # TODO: Detach entity - need identity returned? + # TODO: Expunge entity from cache. + pass + # Clear remaining local state, including local values for primary + # keys on entities that were merged. + with self.auto_populating(False): + with self.operation_recording(False): + for entity in self._local_cache.values(): + entity.clear() + + def rollback(self): + '''Clear all recorded operations and local state. + + Typically this would be used following a failed :meth:`commit` in order + to revert the session to a known good state. + + Newly created entities not yet persisted will be detached from the + session / purged from cache and no longer contribute, but the actual + objects are not deleted from memory. They should no longer be used and + doing so could cause errors. + + ''' + with self.auto_populating(False): + with self.operation_recording(False): + + # Detach all newly created entities and remove from cache. This + # is done because simply clearing the local values of newly + # created entities would result in entities with no identity as + # primary key was local while not persisted. In addition, it + # makes no sense for failed created entities to exist in session + # or cache. 
+ for operation in self.recorded_operations: + if isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ): + entity_key = str(( + str(operation.entity_type), + operation.entity_key.values() + )) + try: + self.cache.remove(entity_key) + except KeyError: + pass + + # Clear locally stored modifications on remaining entities. + for entity in self._local_cache.values(): + entity.clear() + + self.recorded_operations.clear() + + def _fetch_server_information(self): + '''Return server information.''' + result = self.call([{'action': 'query_server_information'}]) + return result[0] + + def _discover_plugins(self, plugin_arguments=None): + '''Find and load plugins in search paths. + + Each discovered module should implement a register function that + accepts this session as first argument. Typically the function should + register appropriate event listeners against the session's event hub. + + def register(session): + session.event_hub.subscribe( + 'topic=ftrack.api.session.construct-entity-type', + construct_entity_type + ) + + *plugin_arguments* should be an optional mapping of keyword arguments + and values to pass to plugin register functions upon discovery. + + ''' + plugin_arguments = plugin_arguments or {} + ftrack_api.plugin.discover( + self._plugin_paths, [self], plugin_arguments + ) + + def _read_schemas_from_cache(self, schema_cache_path): + '''Return schemas and schema hash from *schema_cache_path*. + + *schema_cache_path* should be the path to the file containing the + schemas in JSON format. 
+ + ''' + self.logger.debug(L( + 'Reading schemas from cache {0!r}', schema_cache_path + )) + + if not os.path.exists(schema_cache_path): + self.logger.info(L( + 'Cache file not found at {0!r}.', schema_cache_path + )) + + return [], None + + with open(schema_cache_path, 'r') as schema_file: + schemas = json.load(schema_file) + hash_ = hashlib.md5( + json.dumps(schemas, sort_keys=True) + ).hexdigest() + + return schemas, hash_ + + def _write_schemas_to_cache(self, schemas, schema_cache_path): + '''Write *schemas* to *schema_cache_path*. + + *schema_cache_path* should be a path to a file that the schemas can be + written to in JSON format. + + ''' + self.logger.debug(L( + 'Updating schema cache {0!r} with new schemas.', schema_cache_path + )) + + with open(schema_cache_path, 'w') as local_cache_file: + json.dump(schemas, local_cache_file, indent=4) + + def _load_schemas(self, schema_cache_path): + '''Load schemas. + + First try to load schemas from cache at *schema_cache_path*. If the + cache is not available or the cache appears outdated then load schemas + from server and store fresh copy in cache. + + If *schema_cache_path* is set to `False`, always load schemas from + server bypassing cache. + + ''' + local_schema_hash = None + schemas = [] + + if schema_cache_path: + try: + schemas, local_schema_hash = self._read_schemas_from_cache( + schema_cache_path + ) + except (IOError, TypeError, AttributeError, ValueError): + # Catch any known exceptions when trying to read the local + # schema cache to prevent API from being unusable. + self.logger.exception(L( + 'Schema cache could not be loaded from {0!r}', + schema_cache_path + )) + + # Use `dictionary.get` to retrieve hash to support older version of + # ftrack server not returning a schema hash. + server_hash = self._server_information.get( + 'schema_hash', False + ) + if local_schema_hash != server_hash: + self.logger.debug(L( + 'Loading schemas from server due to hash not matching.' 
+ 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash + )) + schemas = self.call([{'action': 'query_schemas'}])[0] + + if schema_cache_path: + try: + self._write_schemas_to_cache(schemas, schema_cache_path) + except (IOError, TypeError): + self.logger.exception(L( + 'Failed to update schema cache {0!r}.', + schema_cache_path + )) + + else: + self.logger.debug(L( + 'Using cached schemas from {0!r}', schema_cache_path + )) + + return schemas + + def _build_entity_type_classes(self, schemas): + '''Build default entity type classes.''' + fallback_factory = ftrack_api.entity.factory.StandardFactory() + classes = {} + + for schema in schemas: + results = self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.construct-entity-type', + data=dict( + schema=schema, + schemas=schemas + ) + ), + synchronous=True + ) + + results = [result for result in results if result is not None] + + if not results: + self.logger.debug(L( + 'Using default StandardFactory to construct entity type ' + 'class for "{0}"', schema['id'] + )) + entity_type_class = fallback_factory.create(schema) + + elif len(results) > 1: + raise ValueError( + 'Expected single entity type to represent schema "{0}" but ' + 'received {1} entity types instead.' + .format(schema['id'], len(results)) + ) + + else: + entity_type_class = results[0] + + classes[entity_type_class.entity_type] = entity_type_class + + return classes + + def _configure_locations(self): + '''Configure locations.''' + # First configure builtin locations, by injecting them into local cache. + + # Origin. 
+ location = self.create( + 'Location', + data=dict( + name='ftrack.origin', + id=ftrack_api.symbol.ORIGIN_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.OriginLocationMixin, + name='OriginLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 100 + + # Unmanaged. + location = self.create( + 'Location', + data=dict( + name='ftrack.unmanaged', + id=ftrack_api.symbol.UNMANAGED_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + # location.resource_identifier_transformer = ( + # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) + # ) + location.priority = 90 + + # Review. + location = self.create( + 'Location', + data=dict( + name='ftrack.review', + id=ftrack_api.symbol.REVIEW_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 110 + + # Server. + location = self.create( + 'Location', + data=dict( + name='ftrack.server', + id=ftrack_api.symbol.SERVER_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.ServerLocationMixin, + name='ServerLocation' + ) + location.accessor = ftrack_api.accessor.server._ServerAccessor( + session=self + ) + location.structure = ftrack_api.structure.entity_id.EntityIdStructure() + location.priority = 150 + + # Master location based on server scenario. 
+ storage_scenario = self.server_information.get('storage_scenario') + + if ( + storage_scenario and + storage_scenario.get('scenario') + ): + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.storage-scenario.activate', + data=dict( + storage_scenario=storage_scenario + ) + ), + synchronous=True + ) + + # Next, allow further configuration of locations via events. + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.configure-location', + data=dict( + session=self + ) + ), + synchronous=True + ) + + @ftrack_api.logging.deprecation_warning( + 'Session._call is now available as public method Session.call. The ' + 'private method will be removed in version 2.0.' + ) + def _call(self, data): + '''Make request to server with *data* batch describing the actions. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.call(data) + + def call(self, data): + '''Make request to server with *data* batch describing the actions.''' + url = self._server_url + '/api' + headers = { + 'content-type': 'application/json', + 'accept': 'application/json' + } + data = self.encode(data, entity_attribute_strategy='modified_only') + + self.logger.debug(L('Calling server {0} with {1!r}', url, data)) + + response = self._request.post( + url, + headers=headers, + data=data + ) + + self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) + + self.logger.debug(L('Response: {0!r}', response.text)) + try: + result = self.decode(response.text) + + except Exception: + error_message = ( + 'Server reported error in unexpected format. Raw error was: {0}' + .format(response.text) + ) + self.logger.exception(error_message) + raise ftrack_api.exception.ServerError(error_message) + + else: + if 'exception' in result: + # Handle exceptions. 
+ error_message = 'Server reported error: {0}({1})'.format( + result['exception'], result['content'] + ) + self.logger.exception(error_message) + raise ftrack_api.exception.ServerError(error_message) + + return result + + def encode(self, data, entity_attribute_strategy='set_only'): + '''Return *data* encoded as JSON formatted string. + + *entity_attribute_strategy* specifies how entity attributes should be + handled. The following strategies are available: + + * *all* - Encode all attributes, loading any that are currently NOT_SET. + * *set_only* - Encode only attributes that are currently set without + loading any from the remote. + * *modified_only* - Encode only attributes that have been modified + locally. + * *persisted_only* - Encode only remote (persisted) attribute values. + + ''' + entity_attribute_strategies = ( + 'all', 'set_only', 'modified_only', 'persisted_only' + ) + if entity_attribute_strategy not in entity_attribute_strategies: + raise ValueError( + 'Unsupported entity_attribute_strategy "{0}". Must be one of ' + '{1}'.format( + entity_attribute_strategy, + ', '.join(entity_attribute_strategies) + ) + ) + + return json.dumps( + data, + sort_keys=True, + default=functools.partial( + self._encode, + entity_attribute_strategy=entity_attribute_strategy + ) + ) + + def _encode(self, item, entity_attribute_strategy='set_only'): + '''Return JSON encodable version of *item*. + + *entity_attribute_strategy* specifies how entity attributes should be + handled. See :meth:`Session.encode` for available strategies. 
+ + ''' + if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): + return { + '__type__': 'datetime', + 'value': item.isoformat() + } + + if isinstance(item, OperationPayload): + data = dict(item.items()) + if "entity_data" in data: + for key, value in data["entity_data"].items(): + if isinstance(value, ftrack_api.entity.base.Entity): + data["entity_data"][key] = self.entity_reference(value) + + return data + + if isinstance(item, ftrack_api.entity.base.Entity): + data = self.entity_reference(item) + + with self.auto_populating(True): + + for attribute in item.attributes: + value = ftrack_api.symbol.NOT_SET + + if entity_attribute_strategy == 'all': + value = attribute.get_value(item) + + elif entity_attribute_strategy == 'set_only': + if attribute.is_set(item): + value = attribute.get_local_value(item) + if value is ftrack_api.symbol.NOT_SET: + value = attribute.get_remote_value(item) + + elif entity_attribute_strategy == 'modified_only': + if attribute.is_modified(item): + value = attribute.get_local_value(item) + + elif entity_attribute_strategy == 'persisted_only': + if not attribute.computed: + value = attribute.get_remote_value(item) + + if value is not ftrack_api.symbol.NOT_SET: + if isinstance( + attribute, ftrack_api.attribute.ReferenceAttribute + ): + if isinstance(value, ftrack_api.entity.base.Entity): + value = self.entity_reference(value) + + data[attribute.name] = value + + return data + + if isinstance( + item, ftrack_api.collection.MappedCollectionProxy + ): + # Use proxied collection for serialisation. + item = item.collection + + if isinstance(item, ftrack_api.collection.Collection): + data = [] + for entity in item: + data.append(self.entity_reference(entity)) + + return data + + raise TypeError('{0!r} is not JSON serializable'.format(item)) + + def entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. 
+ + Return a mapping containing the __entity_type__ of the entity along with + the key, value pairs that make up it's primary key. + + ''' + reference = { + '__entity_type__': entity.entity_type + } + with self.auto_populating(False): + reference.update(ftrack_api.inspection.primary_key(entity)) + + return reference + + @ftrack_api.logging.deprecation_warning( + 'Session._entity_reference is now available as public method ' + 'Session.entity_reference. The private method will be removed ' + 'in version 2.0.' + ) + def _entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. + + Return a mapping containing the __entity_type__ of the entity along + with the key, value pairs that make up it's primary key. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.entity_reference(entity) + + def decode(self, string): + '''Return decoded JSON *string* as Python object.''' + with self.operation_recording(False): + return json.loads(string, object_hook=self._decode) + + def _decode(self, item): + '''Return *item* transformed into appropriate representation.''' + if isinstance(item, collections.Mapping): + if '__type__' in item: + if item['__type__'] == 'datetime': + item = arrow.get(item['value']) + + elif '__entity_type__' in item: + item = self._create( + item['__entity_type__'], item, reconstructing=True + ) + + return item + + def _get_locations(self, filter_inaccessible=True): + '''Helper to returns locations ordered by priority. + + If *filter_inaccessible* is True then only accessible locations will be + included in result. + + ''' + # Optimise this call. + locations = self.query('Location') + + # Filter. + if filter_inaccessible: + locations = filter( + lambda location: location.accessor, + locations + ) + + # Sort by priority. 
+ locations = sorted( + locations, key=lambda location: location.priority + ) + + return locations + + def pick_location(self, component=None): + '''Return suitable location to use. + + If no *component* specified then return highest priority accessible + location. Otherwise, return highest priority accessible location that + *component* is available in. + + Return None if no suitable location could be picked. + + ''' + if component: + return self.pick_locations([component])[0] + + else: + locations = self._get_locations() + if locations: + return locations[0] + else: + return None + + def pick_locations(self, components): + '''Return suitable locations for *components*. + + Return list of locations corresponding to *components* where each + picked location is the highest priority accessible location for that + component. If a component has no location available then its + corresponding entry will be None. + + ''' + candidate_locations = self._get_locations() + availabilities = self.get_component_availabilities( + components, locations=candidate_locations + ) + + locations = [] + for component, availability in zip(components, availabilities): + location = None + + for candidate_location in candidate_locations: + if availability.get(candidate_location['id']) > 0.0: + location = candidate_location + break + + locations.append(location) + + return locations + + def create_component( + self, path, data=None, location='auto' + ): + '''Create a new component from *path* with additional *data* + + .. note:: + + This is a helper method. To create components manually use the + standard :meth:`Session.create` method. + + *path* can be a string representing a filesystem path to the data to + use for the component. The *path* can also be specified as a sequence + string, in which case a sequence component with child components for + each item in the sequence will be created automatically. The accepted + format for a sequence is '{head}{padding}{tail} [{ranges}]'. 
For + example:: + + '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' + + .. seealso:: + + `Clique documentation `_ + + *data* should be a dictionary of any additional data to construct the + component with (as passed to :meth:`Session.create`). + + If *location* is specified then automatically add component to that + location. The default of 'auto' will automatically pick a suitable + location to add the component to if one is available. To not add to any + location specifiy locations as None. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components registration in the + location. + ''' + if data is None: + data = {} + + if location == 'auto': + # Check if the component name matches one of the ftrackreview + # specific names. Add the component to the ftrack.review location if + # so. This is used to not break backwards compatibility. + if data.get('name') in ( + 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' + ): + location = self.get( + 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID + ) + + else: + location = self.pick_location() + + try: + collection = clique.parse(path) + + except ValueError: + # Assume is a single file. + if 'size' not in data: + data['size'] = self._get_filesystem_size(path) + + data.setdefault('file_type', os.path.splitext(path)[-1]) + + return self._create_component( + 'FileComponent', path, data, location + ) + + else: + # Calculate size of container and members. 
+ member_sizes = {} + container_size = data.get('size') + + if container_size is not None: + if len(collection.indexes) > 0: + member_size = int( + round(container_size / len(collection.indexes)) + ) + for item in collection: + member_sizes[item] = member_size + + else: + container_size = 0 + for item in collection: + member_sizes[item] = self._get_filesystem_size(item) + container_size += member_sizes[item] + + # Create sequence component + container_path = collection.format('{head}{padding}{tail}') + data.setdefault('padding', collection.padding) + data.setdefault('file_type', os.path.splitext(container_path)[-1]) + data.setdefault('size', container_size) + + container = self._create_component( + 'SequenceComponent', container_path, data, location=None + ) + + # Create member components for sequence. + for member_path in collection: + member_data = { + 'name': collection.match(member_path).group('index'), + 'container': container, + 'size': member_sizes[member_path], + 'file_type': os.path.splitext(member_path)[-1] + } + + component = self._create_component( + 'FileComponent', member_path, member_data, location=None + ) + container['members'].append(component) + + if location: + origin_location = self.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + location.add_component( + container, origin_location, recursive=True + ) + + return container + + def _create_component(self, entity_type, path, data, location): + '''Create and return component. + + See public function :py:func:`createComponent` for argument details. + + ''' + component = self.create(entity_type, data) + + # Add to special origin location so that it is possible to add to other + # locations. 
+ origin_location = self.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + origin_location.add_component(component, path, recursive=False) + + if location: + location.add_component(component, origin_location, recursive=False) + + return component + + def _get_filesystem_size(self, path): + '''Return size from *path*''' + try: + size = os.path.getsize(path) + except OSError: + size = 0 + + return size + + def get_component_availability(self, component, locations=None): + '''Return availability of *component*. + + If *locations* is set then limit result to availability of *component* + in those *locations*. + + Return a dictionary of {location_id:percentage_availability} + + ''' + return self.get_component_availabilities( + [component], locations=locations + )[0] + + def get_component_availabilities(self, components, locations=None): + '''Return availabilities of *components*. + + If *locations* is set then limit result to availabilities of + *components* in those *locations*. + + Return a list of dictionaries of {location_id:percentage_availability}. + The list indexes correspond to those of *components*. + + ''' + availabilities = [] + + if locations is None: + locations = self.query('Location') + + # Separate components into two lists, those that are containers and + # those that are not, so that queries can be optimised. + standard_components = [] + container_components = [] + + for component in components: + if 'members' in component.keys(): + container_components.append(component) + else: + standard_components.append(component) + + # Perform queries. 
+ if standard_components: + self.populate( + standard_components, 'component_locations.location_id' + ) + + if container_components: + self.populate( + container_components, + 'members, component_locations.location_id' + ) + + base_availability = {} + for location in locations: + base_availability[location['id']] = 0.0 + + for component in components: + availability = base_availability.copy() + availabilities.append(availability) + + is_container = 'members' in component.keys() + if is_container and len(component['members']): + member_availabilities = self.get_component_availabilities( + component['members'], locations=locations + ) + multiplier = 1.0 / len(component['members']) + for member, member_availability in zip( + component['members'], member_availabilities + ): + for location_id, ratio in member_availability.items(): + availability[location_id] += ( + ratio * multiplier + ) + else: + for component_location in component['component_locations']: + location_id = component_location['location_id'] + if location_id in availability: + availability[location_id] = 100.0 + + for location_id, percentage in availability.items(): + # Avoid quantization error by rounding percentage and clamping + # to range 0-100. + adjusted_percentage = round(percentage, 9) + adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) + availability[location_id] = adjusted_percentage + + return availabilities + + @ftrack_api.logging.deprecation_warning( + 'Session.delayed_job has been deprecated in favour of session.call. ' + 'Please refer to the release notes for more information.' + ) + def delayed_job(self, job_type): + '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. + + *job_type* should be one of the allowed job types. There is currently + only one remote job type "SYNC_USERS_LDAP". 
+ ''' + if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): + raise ValueError( + u'Invalid Job type: {0}.'.format(job_type) + ) + + operation = { + 'action': 'delayed_job', + 'job_type': job_type.name + } + + try: + result = self.call( + [operation] + )[0] + + except ftrack_api.exception.ServerError as error: + raise + + return result['data'] + + def get_widget_url(self, name, entity=None, theme=None): + '''Return an authenticated URL for widget with *name* and given options. + + The returned URL will be authenticated using a token which will expire + after 6 minutes. + + *name* should be the name of the widget to return and should be one of + 'info', 'tasks' or 'tasks_browser'. + + Certain widgets require an entity to be specified. If so, specify it by + setting *entity* to a valid entity instance. + + *theme* sets the theme of the widget and can be either 'light' or 'dark' + (defaulting to 'dark' if an invalid option given). + + ''' + operation = { + 'action': 'get_widget_url', + 'name': name, + 'theme': theme + } + if entity: + operation['entity_type'] = entity.entity_type + operation['entity_key'] = ( + ftrack_api.inspection.primary_key(entity).values() + ) + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'get_widget_url\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support "get_widget_url", ' + 'please update server and try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + else: + return result[0]['widget_url'] + + def encode_media(self, media, version_id=None, keep_original='auto'): + '''Return a new Job that encode *media* to make it playable in browsers. + + *media* can be a path to a file or a FileComponent in the ftrack.server + location. 
+ + The job will encode *media* based on the file type and job data contains + information about encoding in the following format:: + + { + 'output': [{ + 'format': 'video/mp4', + 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' + }, { + 'format': 'image/jpeg', + 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' + }], + 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', + 'keep_original': True + } + + The output components are associated with the job via the job_components + relation. + + An image component will always be generated if possible that can be used + as a thumbnail. + + If *media* is a file path, a new source component will be created and + added to the ftrack server location and a call to :meth:`commit` will be + issued. If *media* is a FileComponent, it will be assumed to be in + available in the ftrack.server location. + + If *version_id* is specified, the new components will automatically be + associated with the AssetVersion. Otherwise, the components will not + be associated to a version even if the supplied *media* belongs to one. + A server version of 3.3.32 or higher is required for the version_id + argument to function properly. + + If *keep_original* is not set, the original media will be kept if it + is a FileComponent, and deleted if it is a file path. You can specify + True or False to change this behavior. + ''' + if isinstance(media, basestring): + # Media is a path to a file. + server_location = self.get( + 'Location', ftrack_api.symbol.SERVER_LOCATION_ID + ) + if keep_original == 'auto': + keep_original = False + + component_data = None + if keep_original: + component_data = dict(version_id=version_id) + + component = self.create_component( + path=media, + data=component_data, + location=server_location + ) + + # Auto commit to ensure component exists when sent to server. + self.commit() + + elif ( + hasattr(media, 'entity_type') and + media.entity_type in ('FileComponent',) + ): + # Existing file component. 
+ component = media + if keep_original == 'auto': + keep_original = True + + else: + raise ValueError( + 'Unable to encode media of type: {0}'.format(type(media)) + ) + + operation = { + 'action': 'encode_media', + 'component_id': component['id'], + 'version_id': version_id, + 'keep_original': keep_original + } + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'encode_media\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support "encode_media", ' + 'please update server and try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + return self.get('Job', result[0]['job_id']) + + def get_upload_metadata( + self, component_id, file_name, file_size, checksum=None + ): + '''Return URL and headers used to upload data for *component_id*. + + *file_name* and *file_size* should match the components details. + + The returned URL should be requested using HTTP PUT with the specified + headers. + + The *checksum* is used as the Content-MD5 header and should contain + the base64-encoded 128-bit MD5 digest of the message (without the + headers) according to RFC 1864. This can be used as a message integrity + check to verify that the data is the same data that was originally sent. + ''' + operation = { + 'action': 'get_upload_metadata', + 'component_id': component_id, + 'file_name': file_name, + 'file_size': file_size, + 'checksum': checksum + } + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. 
+ if 'Invalid action u\'get_upload_metadata\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"get_upload_metadata", please update server and try ' + 'again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + return result[0] + + def send_user_invite(self, user): + '''Send a invitation to the provided *user*. + + *user* is a User instance + + ''' + + self.send_user_invites( + [user] + ) + + def send_user_invites(self, users): + '''Send a invitation to the provided *user*. + + *users* is a list of User instances + + ''' + + operations = [] + + for user in users: + operations.append( + { + 'action':'send_user_invite', + 'user_id': user['id'] + } + ) + + try: + self.call(operations) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'send_user_invite\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"send_user_invite", please update server and ' + 'try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + def send_review_session_invite(self, invitee): + '''Send an invite to a review session to *invitee*. + + *invitee* is a instance of ReviewSessionInvitee. + + .. note:: + + The *invitee* must be committed. + + ''' + self.send_review_session_invites([invitee]) + + def send_review_session_invites(self, invitees): + '''Send an invite to a review session to a list of *invitees*. + + *invitee* is a list of ReviewSessionInvitee objects. + + .. note:: + + All *invitees* must be committed. 
+ + ''' + operations = [] + + for invitee in invitees: + operations.append( + { + 'action': 'send_review_session_invite', + 'review_session_invitee_id': invitee['id'] + } + ) + + try: + self.call(operations) + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'send_review_session_invite\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"send_review_session_invite", please update server and ' + 'try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + +class AutoPopulatingContext(object): + '''Context manager for temporary change of session auto_populate value.''' + + def __init__(self, session, auto_populate): + '''Initialise context.''' + super(AutoPopulatingContext, self).__init__() + self._session = session + self._auto_populate = auto_populate + self._current_auto_populate = None + + def __enter__(self): + '''Enter context switching to desired auto populate setting.''' + self._current_auto_populate = self._session.auto_populate + self._session.auto_populate = self._auto_populate + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit context resetting auto populate to original setting.''' + self._session.auto_populate = self._current_auto_populate + + +class OperationRecordingContext(object): + '''Context manager for temporary change of session record_operations.''' + + def __init__(self, session, record_operations): + '''Initialise context.''' + super(OperationRecordingContext, self).__init__() + self._session = session + self._record_operations = record_operations + self._current_record_operations = None + + def __enter__(self): + '''Enter context.''' + self._current_record_operations = self._session.record_operations + self._session.record_operations = self._record_operations + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit 
context.''' + self._session.record_operations = self._current_record_operations + + +class OperationPayload(collections.MutableMapping): + '''Represent operation payload.''' + + def __init__(self, *args, **kwargs): + '''Initialise payload.''' + super(OperationPayload, self).__init__() + self._data = dict() + self.update(dict(*args, **kwargs)) + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, str(self._data) + ) + + def __getitem__(self, key): + '''Return value for *key*.''' + return self._data[key] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + self._data[key] = value + + def __delitem__(self, key): + '''Remove *key*.''' + del self._data[key] + + def __iter__(self): + '''Iterate over all keys.''' + return iter(self._data) + + def __len__(self): + '''Return count of keys.''' + return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py new file mode 100644 index 00000000000..1aab07ed77a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py new file mode 100644 index 00000000000..eae3784dc2e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py @@ -0,0 +1,38 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from abc import ABCMeta, abstractmethod + + +class Structure(object): + '''Structure plugin interface. 
+ + A structure plugin should compute appropriate paths for data. + + ''' + + __metaclass__ = ABCMeta + + def __init__(self, prefix=''): + '''Initialise structure.''' + self.prefix = prefix + self.path_separator = '/' + super(Structure, self).__init__() + + @abstractmethod + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. + + ''' + + def _get_sequence_expression(self, sequence): + '''Return a sequence expression for *sequence* component.''' + padding = sequence['padding'] + if padding: + expression = '%0{0}d'.format(padding) + else: + expression = '%d' + + return expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py new file mode 100644 index 00000000000..ae466bf6d9f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py @@ -0,0 +1,12 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.structure.base + + +class EntityIdStructure(ftrack_api.structure.base.Structure): + '''Entity id pass-through structure.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a *resourceIdentifier* for supplied *entity*.''' + return entity['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py new file mode 100644 index 00000000000..acc3e21b026 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py @@ -0,0 +1,91 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + 
+import os + +import ftrack_api.symbol +import ftrack_api.structure.base + + +class IdStructure(ftrack_api.structure.base.Structure): + '''Id based structure supporting Components only. + + A components unique id will be used to form a path to store the data at. + To avoid millions of entries in one directory each id is chunked into four + prefix directories with the remainder used to name the file:: + + /prefix/1/2/3/4/56789 + + If the component has a defined filetype it will be added to the path:: + + /prefix/1/2/3/4/56789.exr + + Components that are children of container components will be placed inside + the id structure of their parent:: + + /prefix/1/2/3/4/56789/355827648d.exr + /prefix/1/2/3/4/56789/ajf24215b5.exr + + However, sequence children will be named using their label as an index and + a common prefix of 'file.':: + + /prefix/1/2/3/4/56789/file.0001.exr + /prefix/1/2/3/4/56789/file.0002.exr + + ''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. + + ''' + if entity.entity_type in ('FileComponent',): + # When in a container, place the file inside a directory named + # after the container. + container = entity['container'] + if container and container is not ftrack_api.symbol.NOT_SET: + path = self.get_resource_identifier(container) + + if container.entity_type in ('SequenceComponent',): + # Label doubles as index for now. + name = 'file.{0}{1}'.format( + entity['name'], entity['file_type'] + ) + parts = [os.path.dirname(path), name] + + else: + # Just place uniquely identified file into directory + name = entity['id'] + entity['file_type'] + parts = [path, name] + + else: + name = entity['id'][4:] + entity['file_type'] + parts = ([self.prefix] + list(entity['id'][:4]) + [name]) + + elif entity.entity_type in ('SequenceComponent',): + name = 'file' + + # Add a sequence identifier. 
+ sequence_expression = self._get_sequence_expression(entity) + name += '.{0}'.format(sequence_expression) + + if ( + entity['file_type'] and + entity['file_type'] is not ftrack_api.symbol.NOT_SET + ): + name += entity['file_type'] + + parts = ([self.prefix] + list(entity['id'][:4]) + + [entity['id'][4:]] + [name]) + + elif entity.entity_type in ('ContainerComponent',): + # Just an id directory + parts = ([self.prefix] + + list(entity['id'][:4]) + [entity['id'][4:]]) + + else: + raise NotImplementedError('Cannot generate path for unsupported ' + 'entity {0}'.format(entity)) + + return self.path_separator.join(parts).strip('/') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py new file mode 100644 index 00000000000..0d4d3a57f57 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py @@ -0,0 +1,28 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from .base import Structure + + +class OriginStructure(Structure): + '''Origin structure that passes through existing resource identifier.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* should be a mapping that includes at least a + 'source_resource_identifier' key that refers to the resource identifier + to pass through. + + ''' + if context is None: + context = {} + + resource_identifier = context.get('source_resource_identifier') + if resource_identifier is None: + raise ValueError( + 'Could not generate resource identifier as no source resource ' + 'identifier found in passed context.' 
+ ) + + return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py new file mode 100644 index 00000000000..0b0602df003 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py @@ -0,0 +1,217 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import re +import unicodedata + +import ftrack_api.symbol +import ftrack_api.structure.base + + +class StandardStructure(ftrack_api.structure.base.Structure): + '''Project hierarchy based structure that only supports Components. + + The resource identifier is generated from the project code, the name + of objects in the project structure, asset name and version number:: + + my_project/folder_a/folder_b/asset_name/v003 + + If the component is a `FileComponent` then the name of the component and the + file type are used as filename in the resource_identifier:: + + my_project/folder_a/folder_b/asset_name/v003/foo.jpg + + If the component is a `SequenceComponent` then a sequence expression, + `%04d`, is used. E.g. a component with the name `foo` yields:: + + my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg + + For the member components their index in the sequence is used:: + + my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg + + The name of the component is added to the resource identifier if the + component is a `ContainerComponent`. E.g. 
a container component with the + name `bar` yields:: + + my_project/folder_a/folder_b/asset_name/v003/bar + + For a member of that container the file name is based on the component name + and file type:: + + my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf + + ''' + + def __init__( + self, project_versions_prefix=None, illegal_character_substitute='_' + ): + '''Initialise structure. + + If *project_versions_prefix* is defined, insert after the project code + for versions published directly under the project:: + + my_project//v001/foo.jpg + + Replace illegal characters with *illegal_character_substitute* if + defined. + + .. note:: + + Nested component containers/sequences are not supported. + + ''' + super(StandardStructure, self).__init__() + self.project_versions_prefix = project_versions_prefix + self.illegal_character_substitute = illegal_character_substitute + + def _get_parts(self, entity): + '''Return resource identifier parts from *entity*.''' + session = entity.session + + version = entity['version'] + + if version is ftrack_api.symbol.NOT_SET and entity['version_id']: + version = session.get('AssetVersion', entity['version_id']) + + error_message = ( + 'Component {0!r} must be attached to a committed ' + 'version and a committed asset with a parent context.'.format( + entity + ) + ) + + if ( + version is ftrack_api.symbol.NOT_SET or + version in session.created + ): + raise ftrack_api.exception.StructureError(error_message) + + link = version['link'] + + if not link: + raise ftrack_api.exception.StructureError(error_message) + + structure_names = [ + item['name'] + for item in link[1:-1] + ] + + project_id = link[0]['id'] + project = session.get('Project', project_id) + asset = version['asset'] + + version_number = self._format_version(version['version']) + + parts = [] + parts.append(project['name']) + + if structure_names: + parts.extend(structure_names) + elif self.project_versions_prefix: + # Add *project_versions_prefix* if configured and the 
version is + # published directly under the project. + parts.append(self.project_versions_prefix) + + parts.append(asset['name']) + parts.append(version_number) + + return [self.sanitise_for_filesystem(part) for part in parts] + + def _format_version(self, number): + '''Return a formatted string representing version *number*.''' + return 'v{0:03d}'.format(number) + + def sanitise_for_filesystem(self, value): + '''Return *value* with illegal filesystem characters replaced. + + An illegal character is one that is not typically valid for filesystem + usage, such as non ascii characters, or can be awkward to use in a + filesystem, such as spaces. Replace these characters with + the character specified by *illegal_character_substitute* on + initialisation. If no character was specified as substitute then return + *value* unmodified. + + ''' + if self.illegal_character_substitute is None: + return value + + if isinstance(value, str): + value = value.decode('utf-8') + + value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') + value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) + return unicode(value.strip().lower()) + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information, but + is unused in this implementation. + + + Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not + attached to a committed version and a committed asset with a parent + context. + + ''' + if entity.entity_type in ('FileComponent',): + container = entity['container'] + + if container: + # Get resource identifier for container. + container_path = self.get_resource_identifier(container) + + if container.entity_type in ('SequenceComponent',): + # Strip the sequence component expression from the parent + # container and back the correct filename, i.e. + # /sequence/component/sequence_component_name.0012.exr. 
+ name = '{0}.{1}{2}'.format( + container['name'], entity['name'], entity['file_type'] + ) + parts = [ + os.path.dirname(container_path), + self.sanitise_for_filesystem(name) + ] + + else: + # Container is not a sequence component so add it as a + # normal component inside the container. + name = entity['name'] + entity['file_type'] + parts = [ + container_path, self.sanitise_for_filesystem(name) + ] + + else: + # File component does not have a container, construct name from + # component name and file type. + parts = self._get_parts(entity) + name = entity['name'] + entity['file_type'] + parts.append(self.sanitise_for_filesystem(name)) + + elif entity.entity_type in ('SequenceComponent',): + # Create sequence expression for the sequence component and add it + # to the parts. + parts = self._get_parts(entity) + sequence_expression = self._get_sequence_expression(entity) + parts.append( + '{0}.{1}{2}'.format( + self.sanitise_for_filesystem(entity['name']), + sequence_expression, + self.sanitise_for_filesystem(entity['file_type']) + ) + ) + + elif entity.entity_type in ('ContainerComponent',): + # Add the name of the container to the resource identifier parts. 
+ parts = self._get_parts(entity) + parts.append(self.sanitise_for_filesystem(entity['name'])) + + else: + raise NotImplementedError( + 'Cannot generate resource identifier for unsupported ' + 'entity {0!r}'.format(entity) + ) + + return self.path_separator.join(parts) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py new file mode 100644 index 00000000000..f46760f634b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py @@ -0,0 +1,77 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import os + + +class Symbol(object): + '''A constant symbol.''' + + def __init__(self, name, value=True): + '''Initialise symbol with unique *name* and *value*. + + *value* is used for nonzero testing. + + ''' + self.name = name + self.value = value + + def __str__(self): + '''Return string representation.''' + return self.name + + def __repr__(self): + '''Return representation.''' + return '{0}({1})'.format(self.__class__.__name__, self.name) + + def __nonzero__(self): + '''Return whether symbol represents non-zero value.''' + return bool(self.value) + + def __copy__(self): + '''Return shallow copy. + + Overridden to always return same instance. + + ''' + return self + + +#: Symbol representing that no value has been set or loaded. +NOT_SET = Symbol('NOT_SET', False) + +#: Symbol representing created state. +CREATED = Symbol('CREATED') + +#: Symbol representing modified state. +MODIFIED = Symbol('MODIFIED') + +#: Symbol representing deleted state. +DELETED = Symbol('DELETED') + +#: Topic published when component added to a location. +COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' + +#: Topic published when component removed from a location. 
+COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' + +#: Identifier of builtin origin location. +ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' + +#: Identifier of builtin unmanaged location. +UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' + +#: Identifier of builtin review location. +REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' + +#: Identifier of builtin connect location. +CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' + +#: Identifier of builtin server location. +SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' + +#: Chunk size used when working with data, default to 1Mb. +CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 + +#: Symbol representing syncing users with ldap +JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov new file mode 100644 index 0000000000000000000000000000000000000000..db34709c2426d85147e9512b4de3c66c7dd48a00 GIT binary patch literal 17627 zcmchf2S5|e_UMz)dsR9F2*nC0QWObIB?ux`L?JX01O&wbQW81{NV6ayAVn--P{9fW z=_m?Vz)C_>G$I6ugpg$31pV&!{qMW?zWcxL|K6BocV}nM%+8$o%{eD)5C{a??|4M$ z(c@?|T7VD1+avyKI_`Ju;6a!b6nxU(mv^aq{j4ExLm*JDi$#$L1pNG&{>ur>{=0Ll zKTH0jBVK9YQvtrPPAgQfuh(&SE-r`=B9b~02tI@r5uPs*6p-Odp569bbnC|}=hnBp zE>O@wzzYH%IuFoj3INHaL_{2+abfcJN1*_)r+5nZ^kY^{_xr}!b+g^8 zBASH_@`eQ+`1Vobb@{MGgoVc=Lz1XhZ}nQ~kGx*}J2)yNsFUkCpKnw1{G8=zT zdVlP8l6s)BZq>J%tks_TQZ{K-7pWo)2?ZlRKMb2?GPe&0r*z+8<|`PVo-#Y4QmAx` zsV*_$Jh86zsK!)vtMUmWAx>aw7G~DC6hD=pniUfVn1k9WLrl%idm1K*AFMNqxAY5o zrF8132yXTnneKW=(3qOKzO`OYm5j39koUB6E9r%NM7xOKENxg+wCl^z%mr^D3ijJ0 zZdtrQqI&tt>wd(6icrH6Pfw-Rd$t8wl)995-e3DzdxF{dIL3zHw}M&kn^@U zAl$iaIFFg*^9Yk2ld5wXlU*^3(-t-_#j(0XUvHD&u5EkfTOF{zqAp|Dpt?JAZwl4U 
ze@KR=NkjBEF*0pN;`j6QV{dRyxOG%TEnBt9yyfdy=+Ba8o?iY-U8;-tPfA%L4=TT- zO0M^u838vI1S*>z(erckuIXIUsi=D{A#-;|IXx+ndm8=7?a%c4=}F*+b2I9Jd_-vC zxx#iK``t&Q&JVVP3U%ZS70?6oU3Ksx>+6Li^*11yn)Ha+vRYUX$|8{EXrGRzv9ka zTxNuMSN%V`KIzn954Y;59;}cp-0aQn&nrBUl>ZA>X&Xkmtv>a`s-w}a` zG2p;P%M4m6>Z{6pv-}iv;*;#oBGdS@Vm@fM^m<3!u1lzuTefI1vR^n2<5qj@IH+Tn zQ@3pS4sV=Cs_x5NS*J=JH%x(@tk`n7u~gj+!}i>SJNIYs)mAPlJI%k$m_H$oE>-@N zh^=W8w#q_oQVQSOJeE-|9R2)uRMJZdoBPDS-sDq0psrbXF!4?IkI4i(o}F@L(`qZC zHF6?l^9O|T=g>u zln9rHSn#$!rG{vO40ZrCN^;1uu*UngaTWSO`VYsFI7Fy2nPezMt)8cK8_6abGj#Ys(SQgLfv0 z=09u-fMgrfOwG!WDDwj8^Qjy4R3$sQetY-bl}gG#)&d}wfG6A4&P6s1PM%sEX#i>! zM2R}b3tWDB`)(vwm&j1HT@=UXkef=S_T|-Au=5PAyeRo@AJ#KYc;gjnMcl4wWBXb1 zO}wO_zFjgK$Dj5HlN*zYfukwUI<|>*Of#nN{OoA(B`ut;F>ZMu<~=4GJDvk+Y2e_9 zvrNUHA9ZD8QbJF)=CaGnF-1Fam#xO+44clE=)KT4^R{fe*&?G^FW-zhwlsvflUWs- z@%RLZv+Spshm2w5*@Rj$RW9B3m`CLEr7LLrVRGZqkCSol_OKPv-rtcWU+A~}<5Dlp z&w|oXs&RpclUPU8?9`mW`qZoZ+SWf>+tbdr8)2^RUbY-_eipZ4JjZUP59849aAy}z zG4|=XyqsVNwf8%M3Uusb1U_Kcfu zAA=3DOwh)U&G~na`ZcU&C81q~o1ThugN?pKRY+PWzjiYE(R^;DxBW`o{EB63a%Jrw z+vTi&nNxvrzN6!tI@INMF5^xvq`{A_U_gvrUCOLwr{}uwXdCi zL5uKyu)SYZ#Lr{JnG=S>voCwHgEok^pdC<4{R<9@N)b_PGPk*%)a3qRu_CKm} z5x7=#rU@tW%ov8RNxgNA0&V%DAy}wzB4J#kF;{Q@Vo}4u>Tth$StXlzHrR zsNUA|i*Dp=?O%Y!PXu3)$&nwAyU8eyTkG9meX(wufa;K3Qq9_~LMlEp9_A^p9^Y}> zXteo-UCs)KBZ6HHH&rKE)`11t?*Lm`i<2(?+9lVXs-NAFyQgM&Bj0|01kN=|Fe zNG)6)wx%$1Hl57NwkR9ZdFt3&?E>`#Q+&LVWvi-XaNPx;wKSw&5_$zzzDnU)=l^DS9HLlx$k-O`lRvN zp<~38#@LY&hI?RjAPHiL`)I(8)!}0cK!0Oh|AX)TFzYm}I&j1afSHP0jkn+S0x;DO zSRMY#Qe&C$t5HjVsjD~YZk2L;kQsacG&8CLA*_=0kCqUIlG9f;LOLaLjTDE(+~=ZB z8iOHv^M@akY8g^3U?`B6BgJN>2rhm6gZbS*Mgxe2JRZ4SLSOG1X3Vzp67lo!^ZU zDmUD*xEy(;Vh)@HTsgGWiNGe|3*$}c^6Z5j9V?L@*Aib>zLjojxo|iwTrNanwV48t zHW1*W;a^K$lfHsUHQQj9W6*~=qfrXit{j8$oK4tmb_%3=tieM8QPS?$h(i@?J8J}I zbZT7lj=xt7x3Y*m`iL9R=w%_N8e8lhEsG0>+Wt9Jha-y%%S^?P`!Hk;BW?NhS1|!b zUsHW%TW`;{o-O%`1K@4wZW;8!eQU#{s||$}Wpy^GJ`_m%uum}YVCb}$+V5E^#XKVK zxN}`s#%ZW(sUz~8IFavKu69}B4<=+CW^tD1&cC-CFID0{aH=V^@Zd8~gLIm{2=S;+ 
zypIl_93^CuBWb0)K*dBGdYsVSqIxjWzwAkDRwzP$U*6p}?Cgnznh#f>?3j-8zr|Hu zx=Sqnn}wdznkNe1-xr{*(uU=ZS^Rv-ZTiGId)r4cREc^|lcsobPjL0#jaEMCni3L@ z-&X9OeQtjLRiurVmnvz9=a(_Xyc0&p6AC zx~53l$CM)PqQn9T!`x#|SM!(n-DG}jtzby)v;xFrQVanh{Ittl(R*)Cga3yR!IiJy ztYsGYaGFROBG`97^zFSi3bR-33uQ7}cmsHX+K8;YH?(q{*pQlSKmC4#1kt4a^ z>ZTpyaNLuL1BcDTHyO7aVxL{!&G(M#TRed1=G?H&~*Ix>v$g z_Jf}1=7^0y28s7+l(o}R*o{3J>y}GyAAegj$yqBw`L5r$>Lt1RKywe961{{)cq+a{ zk3kiZsA;eG8O~?Q;?#^yTFJZ!KT+bAxb9)^9Yf;X$S5fn>DOO~?fQE_Fe;kS7ISw! z5}TqqLwRw5Sa8bN{Cbt)-u%LckMu5CRFKjtDie7tY;nx)+h&owgcZc+g=f`&6mAJg>zE+aeC#}ZhkK-zU$_!s3uN}c zkYrz5cc%Bd)aTl>78`UYZS6mN5yF!Iw`H$!`3EtSXF9S_2Xd9H`PIJifd^RO! zDEVofN7F5gDV@L~0+pW3q~78qYC7w>3Z9~ja0x*1Nk@!Tf6p5q0!J|8GqNmT=H~a$ z`<`K`xicY8RSvO|*Mlp!9()SLbAw(n3O+3qPaz#LWDHpaj{#+JXjg_tM4-m(Qm@#L zLD8RPz!VC(#`O@1i>EezLJ-D-l=|l;DPWG!A~G(CS{8T#a-CckqddZm^@=h?)FnPW zGQm}$#LP`z1h%QK*H1K%EyX;zCS=Pm=-x}|Y9Ehluw$)>s3p~5lU&4`{ch_|O$t;; zdnGG&3Ma+SJJ?~UIeSWRughyO=fiVCWT97d`rIX`yt4MUN^wgDrXV;EenTI5waOb= z7pxz?M78FEwA$F$!h(e6U(h_kiplz^BC<7iViK{8@jWhhw0K{IRd&K184bmUacQsi z#WjiFzS?O$)`uB{Lt@~9dFYpdS$Sm^r^`NxIX7R5JNnTscNVABSeW8`b{Lb(F2(sD zId>MFvd(#nKlCFV>T%2Im_f{CyWDFzCwPOq^)+u~KX3!O~idwBG z5mZ{4`@J`EU`T;6%s;Kni6}Xa;71TOnoLfj=crDMA`7i0OHzC;0>Hm}%+?mnGu;{= zd=E%zsX)H%ZS|7f9S@gfz})ZgV1z#@%D}okNgGvvQ%Di|EIo0nj>0mYdv849xiAB9 z=kehq%7h0Uv2q4bV`p}AEs_R$GrktiqO8*?ZZr;J1^^0 zq2u5X=^CjLoz(91blvt#{AY7#o_DExy1Hsv`BW@z7nQnKAOjMM=u&6wdQN+-{}tH* zl`yiBVpN+ST?|-kLEV8etZ1};QHysY;uUM_zG3$8(VH6GBZ^iJMP`ZuYSvt6+UoQL zJw4v0ql+nM&6$+7ujW~-IGv8d3qW>LM5^$`eNM6tP7wO05IUHTtu_(kIc{HZrqC73 zJq>-J6VR_Xs0S9-2?>Nsqk5f-26Iy!DuojL&5WHoUy_COZTJ!A_6~%FOej}DRn7k$nfmG^^fFadzNz>@3pndtFyD3@)ZMX*cMI*O?z@Uk<3q;w22v+kf>TC!ymqBFnVsd6S+}kJ zt?a(qk7u4Qv!-gQDO}x{lE88P9i1V(EFNDf`Q${}V{JcdQ({2ZwhzAEWt6Y8`H>R2 zA5kHZk`(7DYkKP{ltAa(J@HIoTR(xrgP8b_r+A%hLi8{BGR z=q{KlmcuC7b=*;m2S}brS)R+bklhBUg)>dn34uN)jjQ2j7P=R)Hu$TbBR0kB-4tq5 zf1GEtuD7s#W^ACKv#NkQfALb?c|zPBVp&yWrlWbdVO*;;TEVR3(Ys{w=NvnAaMk3| z?>;6MotrNoiOX5iaubUP`jo#1pjM5{2bWN5B%9anSof0iUFJZqMQ`4P`@p%T6I?er 
z)h;?L+`f1-*qtV7wDwlp1sZ_S3i{~BiiU3A$L?bR1jwZtyCFVlU3 zxe1kN#g1IjzA(TTLun-n zRIi9hiPNk0ncAK6z(u%i>;AhnRd0AShQ5*D2^~cE-+^wj5y;JdSsRZiL7G~6n`KNA z`KGjv@c9#M`y@9OHyLULi+vNfQ5Ql;Ke^&2C9-KVvC2;`v#F@vRz1N+du40B@jO{2 zrRl{Ead}fqy$+wS;i#h-J=Uj@H&h@_gHMk`vawf@+Y*P4U6j28sy|vW7D=0D3~R{)A^*Y=7Dm3V%Gob3-H* za~eLGBx+j(nnm2iK^E^A8kw?c5_XAAh^M-F=LAE;p8qPCv=j^f6Uje$tWrIgi`qG zGEB3{h3FbS!yJ#o$<5+4`Y;?>dr}|fem?Y* zx1P(3vvt%xo|j^|eYU(n)Zw5G(CWOi61`qT{Sfk5eJA3QRC;&5t7Pj`40%Xev64S4 zlh1E0+Bl>8Ci(S}GW4EwvKCh(slE4u#@rB6Q&{%x2g*+h`0VX*Jzy~f{<3$t1eQP? z+V+Q6I;*>YMm36IPRzgNJ}jp;`8`}NJ!(vh%~R8%ir&4}l(!z)hc)&YU^?=}-Bxe$ zQ71PA3O;d#QCM`$+$Vg`py6Iw2lB-q0!Gyjs-s%U9;n%1&VVyE6ae$j& zfA`#6G2_M;ua^bO%HDgRiqe(*b9r=t5zeU(hZQ3#*vm2k-_P0{6@$q**!m z_2446(;eM{h`P_qI}$a{9lB%`&qm5zSCxrrQs~uC(%^jhSkY-je#=*t^=9fw!*@&Wj~yCZW&J(L1c{`x<8RllJ(?9G zHiX!0jWDZC&3mK52QKk@uR1ZG<@Va;gv0}5=p%~+{2mJV^5@b{C?9Niq8dat< zFD$VSKqj@yb(8+%9Lkh3(@G}G>Pq?~h<^fFHKY3P|Izo*!v3e<;Uczc4vFxE0~R24 z0Ruo12b2!!X9Ht^ZA6nKZh{^FG0wKa zI{T-?w^GO~Fnm;l0pwt1*_z0a1dPP6<%2U^?Kzpv1OUJrFl^!c1^^1j(+i&V04#?< zf6Pdsi-Ns$#Zm%+PHx>>K*s`b5y(5swaNa_V^Ug{5M*Qkpg;Zw+T_?S>%Qp~7IO`_ z1Uy$qZHWj2;gUR$7f@aMrtqFWsF z0I5Jxuk|xfV`X_xgd_9bef9b3uXV9v`{|E0GGdz~EOkM5RNSWO)ATIgS3N~FrTPoa zXDp{m`}GS)hr0$|M;8{ln3aa*dKf(GognZk&I&;&sf0Ne^n6VjbHVEQX)9r`a16Jc zT;+fAGC8RRvG$v!z1SMXftX~%2m1Ua#J-y6>Yf!7(jUZ-(zDM|uCL0XdZli!G)J9! zG1%~I&Qro8&+J(SQo~d$epDd+!eFUwz5TMnQT1Ii0 zyv)&A9r(2h9Ji%T2-8rr7cbAf7++(5_~0f(l>0~D_6$X_zF0B$8Z#`;pOq1KcPH{) z=BNlw#5~V-qIj4}U6E}V9c=XAJ!Zwp#1{9juN=CC2gM(3lKPfHSJ(Dn<2jpYi`7=X zJ0A%~MffceW~bSLX77rkHB9teuwa2P48E<2GdQEC>O|o+kEVQ)0ID|mU_Jby0u^f_ z+{|pO218hYbGo^>ShVaEw_&J9lw03e?@YqO8-^}#roG9aYusu$t4A3CEou0IHqb@u z0t@XNOO`t;l?DvhhiofmWzqn>F2jRE06(u}wnwi9pq0RqcLpk8=v+q#Dcpt(TiPMZ z8S!P7u26M?Cue~HLhI(ivX5LvK$9(|ffR1pa`v*lB-X51n(Ochz}13H%+*AM^O~p! 
z+DlImlz=2>HES*zJA#F?h+5fB0CbH6EWop<$-tRBOc*L=9!gr_p#+kR?C@=JpqT*L zWSBkxBc?K_YzCTLRepLPTN$0Ip#frE}~<87%TkD7;?}RB4J40C0lQ?KL%| z3;+&YP~u9{^JnE_tcwA=nB9@+?FpOp&)(HQ=p(PidWL8)7$Ah>Ra}3G(4yQCwPLP% zn9zI)yU6T*EGDHAp%619a=26=wth>$His}ru56D2c-;miz~!2T7f;j1+S%Lg zMh6mxxzW+10MB82Oh<TZKFG zSb0Ke4rNl$5Y(zydZR`lZyx1+;=qMhpTn#2>d6312bCaKZ#A=dVJ$K4ey>$TSzAEc z##_%raEC;uUw9&SZ2VMjsEERVvT%h*i*^fV&g;~%3o>%m%|D-ai&C7PQrGtBl25fe$SuYVFR7;C=eui(JOPj|6(4v856Vnffy;H1A z-%A4?T`q!Mc2UAGT}!6K!A2+E?DS)SP%>Sx;Nlvq0LOHZ?Ph%k9G-fV;4{kBcA$b_jE z3xGg)v=B-{V-J2Ei{lC>TbxLY1ZjY?SR>trdE)w;on34{*KGW-%h%@%<#x^6&aAOE zw0wW@(#6+ee5zd@;U1DzVnoh2!;NQV0EMgoJjlG8E1wx~2wYA$nKcJkpkM&Nd>Wil zh!Z>*Cj5pWcZuCskcb^20FW|oK_0dnNM%4~0l;F=1}>`TvGMGp`eINOOyJ%obJ#8l zKneC8R0Y_DKz=I;aPR=qW0Fe-?4ufFy?%7g}(<0<%d!F?8m79(mMBJIpo1=I6Uje2p?JD=z; z13=?n>9-#8TFWm00@p*6y7;N~+Z0g_ExdT@sxx;wL|1l@+o(qy_JlG_ApgoF7Wt^x!qC|RzG0w-tMJYz1G19BDWi}ybxF{&4h3?UxX8@0}ki33cwAO;_9Z1(9$zW7i%ja~? zw(MdKfg=N^qx1j;wuuaeoTmwlYKS>*!!OQlg{Wb|d;bOKMwI?D&^bZSrThLP&;bIy zk-Y`LwI7UN=w!NOJq5D2uQO!PTAdpO98nzra{r!jl8|w*F@MK6mtrux6ir1q>AuIA zyd;1o)Lq}cEL*PE_ABBt>ol0UMfoQ&SwrFGGUy$dKb`-^yHOQJsq!%e^K<2{Vd7B@ zsd;fpd_q*AwHkh*ekTrSPfSVGT_hUR$LX`9sCjR$cwCfBo^@=SKR7DVeZwQGnty+X zn}@rti>|mu+n-1~i{8ZbtE+7O;3jNkx>1zDClutUr)<$dR`**c(H%K#^KwPmBL9350& zz0C8?)06t`kuj#{4E;mCZ7#C9yy4);k__pMaxq`dZn%H7ohjihNiz5@aO&q0@}j{u zl$mI0j%uUW|0t6UxSX_#rO7y^N}PL1peh99&5h|vO>O>~IkxACxtUahYzT6IeG??T zz7^NS29a%?K-g@_ zq!Pd=J%(M(n0!V93>qaLnp~#ep~2Ksm_tj zW}2U3)hl*}2X^mQx3tQ4i)>2yhW?`YV?G2C?Czz75Kvr;u23ENQ9b<@nb$q**fetW z_H+~Xi(_w+ct^WcW64(#vnKD&Q85wSU5{j3Cw(79ou2TC=B%cT?Y>PmjK*>n=!D-k z*IE*R9cz$PRD>sf=;zF{H#ZOlz~n$0AQ7NF1>wh_aqD}b>bOI!nG640Sc51_hy52r z1`BX^M)M~8J3@dbBzTb91(op+TaE2N;N>l@l=hHnybAuH8V59ELGGf}$C!jM8vr#M zCb(b%t~_8D19lM^h!X%zcqqB0n0Ev~CJ{FFpRSw`TU-7c*s4(b$Jhe8ro$%+0Et8= zST;@*$kX0)xwMEA+&W%YAvJl}g4O<QLeBp1nKvi%LwiW+FxGp(0vX1Z^iR!XJxl z^OR9T_FL&1>859SlAYL9!tw7P z-fGPIY5ntvudQJ0PtSQ&sKmg8z3ZyF*xjtIky$z;d_&0Qk4-X4=6^#{t>qvH;&yUWw4mJg 
z*M=!FYULhQ4eoc6r_Kl#`hh~exQB8=Hv)+2U@GUM1NBTR2dl5`4L;y6t_uHsLm7}*EYTQ=?e`X95)$d0@>d^GYU-sj zZ}zEKPB%-HGK&dx0&f&xy4+i1FF@RES*MB>>)>B`ti39x>Tt(&0hhDr55p21m0rR$ zY7J4`rmpAyBr>z^#VHDTA=&CiQlqabwgv6#iA>q^aB^}^IQmu?pb+LcgumEcWaxO% z2H;y2aA^1ODCO`UPzqXLfJp&d0Ooph92&91ILF$;J2xEMqvRtsNxGk*+V>nOx+ycVr``h(Rf%o* z@@bReG4ZCD;55m=lHhQ@?6;ewdn)tC z_9^w3PN&jFTHh#f4sU#Y$;mNHTn|k1b6wY~*JYLkT#8P~VB;4c-QzJsnXjn1iMrJT zXJ(sJ_9u?K%RB6|e$?-<@2NYAz-iy*PBMwu;DoiEO-y|?If|e*$Vf12jj%FL_f_*L zr(F}2Z+B0^zJ>3I#VDEBjXgQ^gCx13Vqp+Q@&I@>Zdmp)rnlt&DvKeM;mNk#1^#5Ma7U=|~v~r*&HP^t_<~l=9aX%1QT+>J}9K|kX=Pxc{c-p@a z-ZIntZ1k__1e#-<0he8rB>j|<%axgfF2gVu;1@N;8QNNTt^Wr;Ex@=+D&sSJC~jg_ z5f}zv81U;6&|9hp=fyB*9!sNK9Dq}eG+zQ&IZKEr*r5L%QD8=xicD~m-L_kMeaNCN z`8~EdHXBPDtI=9|PC05f^9%V8kB{5vcfnO?l>dEH2{%P=_QVw`BFt_6!tPY0eTIwK z#yhJKij`~cBKAKSzQ+l?6=1^naUi;BX^>y0VRstRFd|?1Ky8?{mD=hDS2mH`?jBe@ z<(hC+`NLZZv9%@IPn*KV^ZY89zWA&qNZIivnjzYs4ql8AM$OjceZEpV(t%QuA_m>8 zH8pP5bVYe~e^t72=U`~)9xZR%qlJe(Rb$(3xPBDebAuF}%24j#+I0%i5q3Ry!8s#w zcC@8$=z>C{hXB}z0CYG3pukE7;MNXGl40xt z4-fTnVTKyO!@>aiqi_!K0{n}-g7P7+coG(%M#up6Z|9J}^9A8-g9*w6QctO~-FP=1(Q?id`I=HzC88x{hlZwH zOfDg;GM2JPv!dcTCmoI{*o)ar%sWYMcbD3SkDh$dGg)pa87$^1+IOqQuKKSx>T;X`o&XU0JPeo`fAfsupLxg5SmFP50#5xmogWcX24wNMjzUL|pP}MfT+a z@=wK)7LLG00=Jfvq;3e1S@LOur$S6VkO>Qn-*y=MHtjd-7VWTOzwEGogLUAak`BuF zpCjED^Zy&t6^{Ool1>7W4mRi?kgndoMVu#R+6~s>xhDKIuSqGQYtitYR=7Js(aX}iNF_^jWsjWhi{}DfMI@z?PF36 zXL`K1lKzP8FRxww0v+TQSH45zjM3#@jgtpcnL$@G1JFMRXiB#jOE*ZY&eG~Z*504* z7JA^cCM2A3Dz0WnZ6Pzsi@*Tjg|&v>l#Y7g%NQ3W?+22HFx5|+I`V!oL^Sxu4-$WoY2H`z}AWZO=Q4j>{9Vn@_{Q1*L7#{%%o!j+L}x^P|5S zmC2^B2SUV^9gdY}Rx28-e;N6qR{p?ePpaNrdlPC`GlEzq9d#wkO&tD*rft%P4p7I? z?cMcr3S$Pp-aIRKzQ-M@xaAjJdpbH7Z3iB2J8L%^mq0AQSy&sR`38tgw3JC0|Wwd(D!&4+^Zf8 z3Ol}-v)Iz#au;RsSqJ+Dc)@GvVBf$0<%QqR{_Do|{=PxSc!$CMLB~$QUrz7}4Gog! 
zWgwAVO4jh?AbJnX4naE|gc8yAUPpX`4#GQ7l;D6Pez2VU$>7Br;CtId_&W+j-o563 zNcDwMy@CS5eoI9{sY8mtq*@0Bc!ivv4cF}yu^DTah{N0$0)zB z&FkiUmC0?0IuaiDTi)XxZ_>Wg9Y$(h^!R~{dXAci~8!sUR>{qWb;c4hU zONs!%ODmvMTcnefIzS@i_2B-xqP);O#A`)<QTLRpa;dTLTyWj@pBkAyd$FSo* zAv{4a@w%`afl!C9i)nenITMRO=y@R!%MQaKoj@Q~{X`%P;q^7m2*kS0(0ztw%we8| K0|H@L{l5Sv%EZ9{ literal 0 HcmV?d00001 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png new file mode 100644 index 0000000000000000000000000000000000000000..da6ec772092e788b9db8dd7bf98b9d713255bd72 GIT binary patch literal 115 zcmeAS@N?(olHy`uVBq!ia0vp^AT|dF8<0HkD{mW+vhs9s45^rt{Nwxo|NpDSef}SR z)AjRwLB+TFMqi%)*FV4eU*G8XwcaNAPd!`P&HpncCH@&33UInDb7J`WPs~|dc=2+|Ns5_85n>V41r7_ z2Z$G#W^V*)67zI%45?szdvPP9g98K0!D87L`y+K3D$*XGO*?h$S#W)QaTODT0|NsG z0|O(20s{jJLjwbY0MII^pec-jRhCRa3|q)F2jR8_Zn(Wbw&7?&LAE(a!FvT)I8$Oc za(q!@4wBoXsALXGT7oAksu>=jjG1By8QyR)iowr~<@`_ format, for example:: + + /tmp/asfjsfjoj3/%04d.jpg [1-3] + + ''' + items = [] + for index in range(3): + item_path = os.path.join( + temporary_directory, '{0:04d}.jpg'.format(index) + ) + with open(item_path, 'w') as file_descriptor: + file_descriptor.write(uuid.uuid4().hex) + file_descriptor.close() + + items.append(item_path) + + collections, _ = clique.assemble(items) + sequence_path = collections[0].format() + + return sequence_path + + +@pytest.fixture() +def video_path(): + '''Return a path to a video file.''' + video = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + 'fixture', + 'media', + 'colour_wheel.mov' + ) + ) + + return video + + +@pytest.fixture() +def session(): + '''Return session instance.''' + return ftrack_api.Session() + + +@pytest.fixture() +def session_no_autoconnect_hub(): + '''Return session instance not auto connected to hub.''' + return 
ftrack_api.Session(auto_connect_event_hub=False) + + +@pytest.fixture() +def unique_name(): + '''Return a unique name.''' + return 'test-{0}'.format(uuid.uuid4()) + + +@pytest.fixture() +def temporary_path(request): + '''Return temporary path.''' + path = tempfile.mkdtemp() + + def cleanup(): + '''Remove created path.''' + try: + shutil.rmtree(path) + except OSError: + pass + + request.addfinalizer(cleanup) + + return path + + +@pytest.fixture() +def new_user(request, session, unique_name): + '''Return a newly created unique user.''' + entity = session.create('User', {'username': unique_name}) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(entity) + session.commit() + + request.addfinalizer(cleanup) + + return entity + + +@pytest.fixture() +def user(session): + '''Return the same user entity for entire session.''' + # Jenkins user + entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def project_schema(session): + '''Return project schema.''' + # VFX Scheme + entity = session.get( + 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' + ) + assert entity is not None + return entity + + +@pytest.fixture() +def new_project_tree(request, session, user): + '''Return new project with basic tree.''' + project_schema = session.query('ProjectSchema').first() + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) + project = session.create('Project', { + 'name': project_name, + 'full_name': project_name + '_full', + 'project_schema': project_schema + }) + + for sequence_number in range(1): + sequence = session.create('Sequence', { + 'name': 'sequence_{0:03d}'.format(sequence_number), + 'parent': project + }) + + for 
shot_number in range(1): + shot = session.create('Shot', { + 'name': 'shot_{0:03d}'.format(shot_number * 10), + 'parent': sequence, + 'status': default_shot_status + }) + + for task_number in range(1): + task = session.create('Task', { + 'name': 'task_{0:03d}'.format(task_number), + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + session.create('Appointment', { + 'type': 'assignment', + 'context': task, + 'resource': user + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(project) + session.commit() + + request.addfinalizer(cleanup) + + return project + + +@pytest.fixture() +def new_project(request, session, user): + '''Return new empty project.''' + project_schema = session.query('ProjectSchema').first() + project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) + project = session.create('Project', { + 'name': project_name, + 'full_name': project_name + '_full', + 'project_schema': project_schema + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(project) + session.commit() + + request.addfinalizer(cleanup) + + return project + + +@pytest.fixture() +def project(session): + '''Return same project for entire session.''' + # Test project. 
+ entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def new_task(request, session, unique_name): + '''Return a new task.''' + project = session.query( + 'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb' + ).one() + project_schema = project['project_schema'] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + task = session.create('Task', { + 'name': unique_name, + 'parent': project, + 'status': default_task_status, + 'type': default_task_type + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(task) + session.commit() + + request.addfinalizer(cleanup) + + return task + + +@pytest.fixture() +def task(session): + '''Return same task for entire session.''' + # Tests/python_api/tasks/t1 + entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def new_scope(request, session, unique_name): + '''Return a new scope.''' + scope = session.create('Scope', { + 'name': unique_name + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(scope) + session.commit() + + request.addfinalizer(cleanup) + + return scope + + +@pytest.fixture() +def new_job(request, session, unique_name, user): + '''Return a new scope.''' + job = session.create('Job', { + 'type': 'api_job', + 'user': user + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(job) + session.commit() + + request.addfinalizer(cleanup) + + return job + + +@pytest.fixture() +def new_note(request, session, unique_name, new_task, user): + '''Return a new note attached to a task.''' + note = new_task.create_note(unique_name, user) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(note) + 
session.commit() + + request.addfinalizer(cleanup) + + return note + + +@pytest.fixture() +def new_asset_version(request, session): + '''Return a new asset version.''' + asset_version = session.create('AssetVersion', { + 'asset_id': 'dd9a7e2e-c5eb-11e1-9885-f23c91df25eb' + }) + session.commit() + + # Do not cleanup the version as that will sometimes result in a deadlock + # database error. + + return asset_version + + +@pytest.fixture() +def new_component(request, session, temporary_file): + '''Return a new component not in any location except origin.''' + component = session.create_component(temporary_file, location=None) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture() +def new_container_component(request, session, temporary_directory): + '''Return a new container component not in any location except origin.''' + component = session.create('ContainerComponent') + + # Add to special origin location so that it is possible to add to other + # locations. 
+ origin_location = session.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + origin_location.add_component( + component, temporary_directory, recursive=False + ) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture() +def new_sequence_component(request, session, temporary_sequence): + '''Return a new sequence component not in any location except origin.''' + component = session.create_component(temporary_sequence, location=None) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture +def mocked_schemas(): + '''Return a list of mocked schemas.''' + return [{ + 'id': 'Foo', + 'type': 'object', + 'properties': { + 'id': { + 'type': 'string' + }, + 'string': { + 'type': 'string' + }, + 'integer': { + 'type': 'integer' + }, + 'number': { + 'type': 'number' + }, + 'boolean': { + 'type': 'boolean' + }, + 'bars': { + 'type': 'array', + 'items': { + 'ref': '$Bar' + } + }, + 'date': { + 'type': 'string', + 'format': 'date-time' + } + }, + 'immutable': [ + 'id' + ], + 'primary_key': [ + 'id' + ], + 'required': [ + 'id' + ], + 'default_projections': [ + 'id' + ] + }, { + 'id': 'Bar', + 'type': 'object', + 'properties': { + 'id': { + 'type': 'string' + }, + 'name': { + 'type': 'string' + }, + 'computed_value': { + 'type': 'string', + } + }, + 'computed': [ + 'computed_value' + ], + 'immutable': [ + 'id' + ], + 'primary_key': [ + 'id' + ], + 'required': [ + 'id' + ], + 'default_projections': [ + 'id' + ] + }] + + +@pytest.yield_fixture +def mocked_schema_session(mocker, mocked_schemas): + '''Return a session instance with mocked schemas.''' + with mocker.patch.object( + ftrack_api.Session, + '_load_schemas', + return_value=mocked_schemas + ): + # Mock _configure_locations since it will fail if 
no location schemas + # exist. + with mocker.patch.object( + ftrack_api.Session, + '_configure_locations' + ): + patched_session = ftrack_api.Session() + yield patched_session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py new file mode 100644 index 00000000000..bc98f15de24 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py new file mode 100644 index 00000000000..78d61a62d1c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py @@ -0,0 +1,54 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack +import json + + +def test_create_component(new_asset_version, temporary_file): + '''Create component on asset version.''' + session = new_asset_version.session + component = new_asset_version.create_component( + temporary_file, location=None + ) + assert component['version'] is new_asset_version + + # Have to delete component before can delete asset version. 
+ session.delete(component) + + +def test_create_component_specifying_different_version( + new_asset_version, temporary_file +): + '''Create component on asset version ignoring specified version.''' + session = new_asset_version.session + component = new_asset_version.create_component( + temporary_file, location=None, + data=dict( + version_id='this-value-should-be-ignored', + version='this-value-should-be-overridden' + ) + ) + assert component['version'] is new_asset_version + + # Have to delete component before can delete asset version. + session.delete(component) + + +def test_encode_media(new_asset_version, video_path): + '''Encode media based on a file path + + Encoded components should be associated with the version. + ''' + session = new_asset_version.session + job = new_asset_version.encode_media(video_path) + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'output' in job_data + assert len(job_data['output']) + assert 'component_id' in job_data['output'][0] + + component_id = job_data['output'][0]['component_id'] + component = session.get('FileComponent', component_id) + + # Component should be associated with the version. 
+ assert component['version_id'] == new_asset_version['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py new file mode 100644 index 00000000000..aff456e2388 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py @@ -0,0 +1,14 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +import pytest + + +def test_hash(project, task, user): + '''Entities can be hashed.''' + test_set = set() + test_set.add(project) + test_set.add(task) + test_set.add(user) + + assert test_set == set((project, task, user)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py new file mode 100644 index 00000000000..347c74a50de --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py @@ -0,0 +1,70 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack +import os + +import pytest + + +def test_get_availability(new_component): + '''Retrieve availability in locations.''' + session = new_component.session + availability = new_component.get_availability() + + # Note: Currently the origin location is also 0.0 as the link is not + # persisted to the server. This may change in future and this test would + # need updating as a result. + assert set(availability.values()) == set([0.0]) + + # Add to a location. + source_location = session.query( + 'Location where name is "ftrack.origin"' + ).one() + + target_location = session.query( + 'Location where name is "ftrack.unmanaged"' + ).one() + + target_location.add_component(new_component, source_location) + + # Recalculate availability. 
+ + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. + del new_component['component_locations'] + + availability = new_component.get_availability() + target_availability = availability.pop(target_location['id']) + assert target_availability == 100.0 + + # All other locations should still be 0. + assert set(availability.values()) == set([0.0]) + +@pytest.fixture() +def image_path(): + '''Return a path to an image file.''' + image_path = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image.png' + ) + ) + + return image_path + +def test_create_task_thumbnail(task, image_path): + '''Successfully create thumbnail component and set as task thumbnail.''' + component = task.create_thumbnail(image_path) + component.session.commit() + assert component['id'] == task['thumbnail_id'] + + +def test_create_thumbnail_with_data(task, image_path, unique_name): + '''Successfully create thumbnail component with custom data.''' + data = {'name': unique_name} + component = task.create_thumbnail(image_path, data=data) + component.session.commit() + assert component['name'] == unique_name diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py new file mode 100644 index 00000000000..5d5a0baa7ca --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py @@ -0,0 +1,25 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.factory + + +class CustomUser(ftrack_api.entity.base.Entity): + '''Represent custom user.''' + + +def test_extend_standard_factory_with_bases(session): + '''Successfully add extra bases to standard factory.''' + standard_factory = 
ftrack_api.entity.factory.StandardFactory() + + schemas = session._load_schemas(False) + user_schema = [ + schema for schema in schemas if schema['id'] == 'User' + ].pop() + + user_class = standard_factory.create(user_schema, bases=[CustomUser]) + session.types[user_class.entity_type] = user_class + + user = session.query('User').first() + + assert CustomUser in type(user).__mro__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py new file mode 100644 index 00000000000..52ddbda0aca --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py @@ -0,0 +1,42 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + + +def test_create_job(session, user): + '''Create job.''' + job = session.create('Job', { + 'user': user + }) + + assert job + session.commit() + assert job['type'] == 'api_job' + + session.delete(job) + session.commit() + + +def test_create_job_with_valid_type(session, user): + '''Create job explicitly specifying valid type.''' + job = session.create('Job', { + 'user': user, + 'type': 'api_job' + }) + + assert job + session.commit() + assert job['type'] == 'api_job' + + session.delete(job) + session.commit() + + +def test_create_job_using_faulty_type(session, user): + '''Fail to create job with faulty type.''' + with pytest.raises(ValueError): + session.create('Job', { + 'user': user, + 'type': 'not-allowed-type' + }) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py new file mode 100644 index 00000000000..5bb90e451f1 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py @@ -0,0 +1,516 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import base64 +import filecmp + +import pytest +import requests + +import ftrack_api.exception +import ftrack_api.accessor.disk +import ftrack_api.structure.origin +import ftrack_api.structure.id +import ftrack_api.entity.location +import ftrack_api.resource_identifier_transformer.base as _transformer +import ftrack_api.symbol + + +class Base64ResourceIdentifierTransformer( + _transformer.ResourceIdentifierTransformer +): + '''Resource identifier transformer for test purposes. + + Store resource identifier as base 64 encoded string. + + ''' + + def encode(self, resource_identifier, context=None): + '''Return encoded *resource_identifier* for storing centrally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return base64.encodestring(resource_identifier) + + def decode(self, resource_identifier, context=None): + '''Return decoded *resource_identifier* for use locally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return base64.decodestring(resource_identifier) + + +@pytest.fixture() +def new_location(request, session, unique_name, temporary_directory): + '''Return new managed location.''' + location = session.create('Location', { + 'name': 'test-location-{}'.format(unique_name) + }) + + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=os.path.join(temporary_directory, 'location') + ) + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 10 + + session.commit() + + def cleanup(): + '''Remove created entity.''' + # First auto-remove all components in location. 
+ for location_component in location['location_components']: + session.delete(location_component) + + # At present, need this intermediate commit otherwise server errors + # complaining that location still has components in it. + session.commit() + + session.delete(location) + session.commit() + + request.addfinalizer(cleanup) + + return location + + +@pytest.fixture() +def new_unmanaged_location(request, session, unique_name): + '''Return new unmanaged location.''' + location = session.create('Location', { + 'name': 'test-location-{}'.format(unique_name) + }) + + # TODO: Change to managed and use a temporary directory cleaned up after. + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedTestLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 10 + + session.commit() + + def cleanup(): + '''Remove created entity.''' + # First auto-remove all components in location. + for location_component in location['location_components']: + session.delete(location_component) + + # At present, need this intermediate commit otherwise server errors + # complaining that location still has components in it. 
+ session.commit() + + session.delete(location) + session.commit() + + request.addfinalizer(cleanup) + + return location + + +@pytest.fixture() +def origin_location(session): + '''Return origin location.''' + return session.query('Location where name is "ftrack.origin"').one() + +@pytest.fixture() +def server_location(session): + '''Return server location.''' + return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) + + +@pytest.fixture() +def server_image_component(request, session, server_location): + image_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image.png' + ) + ) + component = session.create_component( + image_file, location=server_location + ) + + def cleanup(): + server_location.remove_component(component) + request.addfinalizer(cleanup) + + return component + + +@pytest.mark.parametrize('name', [ + 'named', + None +], ids=[ + 'named', + 'unnamed' +]) +def test_string_representation(session, name): + '''Return string representation.''' + location = session.create('Location', {'id': '1'}) + if name: + location['name'] = name + assert str(location) == '' + else: + assert str(location) == '' + + +def test_add_components(new_location, origin_location, session, temporary_file): + '''Add components.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + new_location.add_components( + [component_a, component_b], [origin_location, origin_location] + ) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. 
+ del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + +def test_add_components_from_single_location( + new_location, origin_location, session, temporary_file +): + '''Add components from single location.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + new_location.add_components([component_a, component_b], origin_location) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. + del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + +def test_add_components_with_mismatching_sources(new_location, new_component): + '''Fail to add components when sources mismatched.''' + with pytest.raises(ValueError): + new_location.add_components([new_component], []) + + +def test_add_components_with_undefined_structure(new_location, mocker): + '''Fail to add components when location structure undefined.''' + mocker.patch.object(new_location, 'structure', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_components([], []) + + +def test_add_components_already_in_location( + session, temporary_file, new_location, new_component, origin_location +): + '''Fail to add components already in location.''' + new_location.add_component(new_component, origin_location) + + another_new_component = session.create_component( + temporary_file, location=None + ) + + with pytest.raises(ftrack_api.exception.ComponentInLocationError): + 
new_location.add_components( + [another_new_component, new_component], origin_location + ) + + +def test_add_component_when_data_already_exists( + new_location, new_component, origin_location +): + '''Fail to add component when data already exists.''' + # Inject pre-existing data on disk. + resource_identifier = new_location.structure.get_resource_identifier( + new_component + ) + container = new_location.accessor.get_container(resource_identifier) + new_location.accessor.make_container(container) + data = new_location.accessor.open(resource_identifier, 'w') + data.close() + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_component_missing_source_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to add component when source is missing accessor.''' + mocker.patch.object(origin_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_component_missing_target_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to add component when target is missing accessor.''' + mocker.patch.object(new_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_container_component( + new_container_component, new_location, origin_location +): + '''Add container component.''' + new_location.add_component(new_container_component, origin_location) + + assert ( + new_location.get_component_availability(new_container_component) + == 100.0 + ) + + +def test_add_sequence_component_recursively( + new_sequence_component, new_location, origin_location +): + '''Add sequence component recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=True + ) + + assert ( + 
new_location.get_component_availability(new_sequence_component) + == 100.0 + ) + + +def test_add_sequence_component_non_recursively( + new_sequence_component, new_location, origin_location +): + '''Add sequence component non recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=False + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_components( + session, new_location, origin_location, temporary_file +): + '''Remove components.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + new_location.add_components([component_a, component_b], origin_location) + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + new_location.remove_components([ + component_a, component_b + ]) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. 
+ del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + +def test_remove_sequence_component_recursively( + new_sequence_component, new_location, origin_location +): + '''Remove sequence component recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=True + ) + + new_location.remove_component( + new_sequence_component, recursive=True + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_sequence_component_non_recursively( + new_sequence_component, new_location, origin_location +): + '''Remove sequence component non recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=False + ) + + new_location.remove_component( + new_sequence_component, recursive=False + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_component_missing_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to remove component when location is missing accessor.''' + new_location.add_component(new_component, origin_location) + mocker.patch.object(new_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.remove_component(new_component) + + +def test_resource_identifier_transformer( + new_component, new_unmanaged_location, origin_location, mocker +): + '''Transform resource identifier.''' + session = new_unmanaged_location.session + + transformer = Base64ResourceIdentifierTransformer(session) + mocker.patch.object( + new_unmanaged_location, 'resource_identifier_transformer', transformer + ) + + new_unmanaged_location.add_component(new_component, origin_location) + + original_resource_identifier = origin_location.get_resource_identifier( + new_component + ) + assert ( + 
new_component['component_locations'][0]['resource_identifier'] + == base64.encodestring(original_resource_identifier) + ) + + assert ( + new_unmanaged_location.get_resource_identifier(new_component) + == original_resource_identifier + ) + + +def test_get_filesystem_path(new_component, new_location, origin_location): + '''Retrieve filesystem path.''' + new_location.add_component(new_component, origin_location) + resource_identifier = new_location.structure.get_resource_identifier( + new_component + ) + expected = os.path.normpath( + os.path.join(new_location.accessor.prefix, resource_identifier) + ) + assert new_location.get_filesystem_path(new_component) == expected + + +def test_get_context(new_component, new_location, origin_location): + '''Retrieve context for component.''' + resource_identifier = origin_location.get_resource_identifier( + new_component + ) + context = new_location._get_context(new_component, origin_location) + assert context == { + 'source_resource_identifier': resource_identifier + } + + +def test_get_context_for_component_not_in_source(new_component, new_location): + '''Retrieve context for component not in source location.''' + context = new_location._get_context(new_component, new_location) + assert context == {} + + +def test_data_transfer(session, new_location, origin_location): + '''Transfer a real file and make sure it is identical.''' + video_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'colour_wheel.mov' + ) + ) + component = session.create_component( + video_file, location=new_location + ) + new_video_file = new_location.get_filesystem_path(component) + + assert filecmp.cmp(video_file, new_video_file) + + +def test_get_thumbnail_url(server_location, server_image_component): + '''Test download a thumbnail image from server location''' + thumbnail_url = server_location.get_thumbnail_url( + server_image_component, + size=10 + ) + assert thumbnail_url + + response = 
requests.get(thumbnail_url) + response.raise_for_status() + + image_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image-resized-10.png' + ) + ) + expected_image_contents = open(image_file).read() + assert response.content == expected_image_contents diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py new file mode 100644 index 00000000000..3a81fdbe858 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py @@ -0,0 +1,135 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import uuid + +import ftrack_api + + +def test_query_metadata(new_project): + '''Query metadata.''' + session = new_project.session + + metadata_key = uuid.uuid1().hex + metadata_value = uuid.uuid1().hex + new_project['metadata'][metadata_key] = metadata_value + session.commit() + + results = session.query( + 'Project where metadata.key is {0}'.format(metadata_key) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + results = session.query( + 'Project where metadata.value is {0}'.format(metadata_value) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + results = session.query( + 'Project where metadata.key is {0} and ' + 'metadata.value is {1}'.format(metadata_key, metadata_value) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + +def test_set_get_metadata_from_different_sessions(new_project): + '''Get and set metadata using different sessions.''' + session = new_project.session + + metadata_key = uuid.uuid1().hex + metadata_value = uuid.uuid1().hex + new_project['metadata'][metadata_key] = metadata_value + session.commit() + + new_session = ftrack_api.Session() + project = 
new_session.query( + 'Project where id is {0}'.format(new_project['id']) + )[0] + + assert project['metadata'][metadata_key] == metadata_value + + project['metadata'][metadata_key] = uuid.uuid1().hex + + new_session.commit() + + new_session = ftrack_api.Session() + project = new_session.query( + 'Project where id is {0}'.format(project['id']) + )[0] + + assert project['metadata'][metadata_key] != metadata_value + + +def test_get_set_multiple_metadata(new_project): + '''Get and set multiple metadata.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1', + 'key2': 'value2' + } + session.commit() + + assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) + + new_session = ftrack_api.Session() + retrieved = new_session.query( + 'Project where id is {0}'.format(new_project['id']) + )[0] + + assert set(retrieved['metadata'].keys()) == set(['key1', 'key2']) + + +def test_metadata_parent_type_remains_in_schema_id_format(session, new_project): + '''Metadata parent_type remains in schema id format post commit.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + assert entity['parent_type'] == new_project.entity_type + + +def test_set_metadata_twice(new_project): + '''Set metadata twice in a row.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1', + 'key2': 'value2' + } + session.commit() + + assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) + + new_project['metadata'] = { + 'key3': 'value3', + 'key4': 'value4' + } + session.commit() + + +def test_set_same_metadata_on_retrieved_entity(new_project): + '''Set same metadata on retrieved entity.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1' + } + session.commit() + + project = session.get('Project', new_project['id']) + + project['metadata'] = { + 'key1': 'value1' + 
} + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py new file mode 100644 index 00000000000..5d854eaed4e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py @@ -0,0 +1,67 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api +import ftrack_api.inspection + + +def test_create_reply(session, new_note, user, unique_name): + '''Create reply to a note.''' + reply_text = 'My reply on note' + new_note.create_reply(reply_text, user) + + session.commit() + + assert len(new_note['replies']) == 1 + + assert reply_text == new_note['replies'][0]['content'] + + +def test_create_note_on_entity(session, new_task, user, unique_name): + '''Create note attached to an entity.''' + note = new_task.create_note(unique_name, user) + session.commit() + + session.reset() + retrieved_task = session.get(*ftrack_api.inspection.identity(new_task)) + assert len(retrieved_task['notes']) == 1 + assert ( + ftrack_api.inspection.identity(retrieved_task['notes'][0]) + == ftrack_api.inspection.identity(note) + ) + + +def test_create_note_on_entity_specifying_recipients( + session, new_task, user, unique_name, new_user +): + '''Create note with specified recipients attached to an entity.''' + recipient = new_user + note = new_task.create_note(unique_name, user, recipients=[recipient]) + session.commit() + + session.reset() + retrieved_note = session.get(*ftrack_api.inspection.identity(note)) + + # Note: The calling user is automatically added server side so there will be + # 2 recipients. 
+ assert len(retrieved_note['recipients']) == 2 + specified_recipient_present = False + for entry in retrieved_note['recipients']: + if entry['resource_id'] == recipient['id']: + specified_recipient_present = True + break + + assert specified_recipient_present + + +def test_create_note_on_entity_specifying_category( + session, new_task, user, unique_name +): + '''Create note with specified category attached to an entity.''' + category = session.query('NoteCategory').first() + note = new_task.create_note(unique_name, user, category=category) + session.commit() + + session.reset() + retrieved_note = session.get(*ftrack_api.inspection.identity(note)) + assert retrieved_note['category']['id'] == category['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py new file mode 100644 index 00000000000..10ef485aed4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py @@ -0,0 +1,64 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest + + +@pytest.mark.parametrize('schema, expected', [ + ('Task', [ + 'Not started', 'In progress', 'Awaiting approval', 'Approved' + ]), + ('Shot', [ + 'Normal', 'Omitted', 'On Hold' + ]), + ('AssetVersion', [ + 'Approved', 'Pending' + ]), + ('AssetBuild', [ + 'Normal', 'Omitted', 'On Hold' + ]), + ('Invalid', ValueError) +], ids=[ + 'task', + 'shot', + 'asset version', + 'asset build', + 'invalid' +]) +def test_get_statuses(project_schema, schema, expected): + '''Retrieve statuses for schema and optional type.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + project_schema.get_statuses(schema) + + else: + statuses = project_schema.get_statuses(schema) + status_names = 
[status['name'] for status in statuses] + assert sorted(status_names) == sorted(expected) + + +@pytest.mark.parametrize('schema, expected', [ + ('Task', [ + 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', + 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', + 'test 1', 'test type 2' + ]), + ('AssetBuild', ['Character', 'Prop', 'Environment', 'Matte Painting']), + ('Invalid', ValueError) +], ids=[ + 'task', + 'asset build', + 'invalid' +]) +def test_get_types(project_schema, schema, expected): + '''Retrieve types for schema.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + project_schema.get_types(schema) + + else: + types = project_schema.get_types(schema) + type_names = [type_['name'] for type_ in types] + assert sorted(type_names) == sorted(expected) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py new file mode 100644 index 00000000000..1a5afe70c96 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py @@ -0,0 +1,24 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + + +def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): + '''Add, remove and query scopes for task.''' + query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) + tasks = session.query(query_string) + + assert len(tasks) == 0 + + new_task['scopes'].append(new_scope) + session.commit() + + tasks = session.query(query_string) + + assert len(tasks) == 1 and tasks[0] == new_task + + new_task['scopes'].remove(new_scope) + session.commit() + + tasks = session.query(query_string) + + assert len(tasks) == 0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py new file mode 100644 index 00000000000..4d7e4550421 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py @@ -0,0 +1,49 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + + +def test_force_start_timer(new_user, task): + '''Successfully force starting a timer when another timer is running.''' + first_timer = new_user.start_timer(context=task) + second_timer = new_user.start_timer(context=task, force=True) + + assert first_timer['id'] + assert second_timer['id'] + assert first_timer['id'] != second_timer['id'] + + +def test_timer_creates_timelog(new_user, task, unique_name): + '''Successfully create time log when stopping timer. + + A timer which was immediately stopped should have a duration less than + a minute. + + ''' + comment = 'comment' + unique_name + timer = new_user.start_timer( + context=task, + name=unique_name, + comment=comment + ) + timer_start = timer['start'] + timelog = new_user.stop_timer() + + assert timelog['user_id'] == new_user['id'] + assert timelog['context_id']== task['id'] + assert timelog['name'] == unique_name + assert timelog['comment'] == comment + assert timelog['start'] == timer_start + assert isinstance(timelog['duration'], (int, long, float)) + assert timelog['duration'] < 60 + + +def test_reset_user_api_key(new_user): + '''Test resetting of api keys.''' + + api_keys = list() + for i in range(0, 10): + api_keys.append(new_user.reset_api_key()) + + # make sure all api keys are unique + assert len(set(api_keys)) == 10 + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py new file mode 100644 index 00000000000..bc98f15de24 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py new file mode 100644 index 00000000000..09b270a0438 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py @@ -0,0 +1,92 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import sys +import time +import logging +import argparse + +import ftrack_api +from ftrack_api.event.base import Event + + +TOPIC = 'test_event_hub_server_heartbeat' +RECEIVED = [] + + +def callback(event): + '''Track received messages.''' + counter = event['data']['counter'] + RECEIVED.append(counter) + print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) + + +def main(arguments=None): + '''Publish and receive heartbeat test.''' + parser = argparse.ArgumentParser() + parser.add_argument('mode', choices=['publish', 'subscribe']) + + namespace = parser.parse_args(arguments) + logging.basicConfig(level=logging.INFO) + + session = ftrack_api.Session() + + message_count = 100 + sleep_time_per_message = 1 + + if namespace.mode == 'publish': + max_atempts = 100 + retry_interval = 0.1 + atempt = 0 + while not session.event_hub.connected: + print ( + 'Session is not yet connected to event hub, sleeping for 0.1s' + ) + time.sleep(retry_interval) + + atempt = atempt + 1 + if atempt > max_atempts: + raise Exception( + 'Unable to connect to server within {0} seconds'.format( + max_atempts * retry_interval + ) + ) + + print('Sending {0} messages...'.format(message_count)) + + for counter in range(1, message_count + 1): + session.event_hub.publish( + Event(topic=TOPIC, 
data=dict(counter=counter)) + ) + print('Sent message {0}'.format(counter)) + + if counter < message_count: + time.sleep(sleep_time_per_message) + + elif namespace.mode == 'subscribe': + session.event_hub.subscribe('topic={0}'.format(TOPIC), callback) + session.event_hub.wait( + duration=( + ((message_count - 1) * sleep_time_per_message) + 15 + ) + ) + + if len(RECEIVED) != message_count: + print( + '>> Failed to receive all messages. Dropped {0} <<' + .format(message_count - len(RECEIVED)) + ) + return False + + # Give time to flush all buffers. + time.sleep(5) + + return True + + +if __name__ == '__main__': + result = main(sys.argv[1:]) + if not result: + raise SystemExit(1) + else: + raise SystemExit(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py new file mode 100644 index 00000000000..d9496fe0703 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py @@ -0,0 +1,36 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.event.base + + +def test_string_representation(): + '''String representation.''' + event = ftrack_api.event.base.Event('test', id='some-id') + assert str(event) == ( + "" + ) + + +def test_stop(): + '''Set stopped flag on event.''' + event = ftrack_api.event.base.Event('test', id='some-id') + + assert event.is_stopped() is False + + event.stop() + assert event.is_stopped() is True + + +def test_is_stopped(): + '''Report stopped status of event.''' + event = ftrack_api.event.base.Event('test', id='some-id') + + assert event.is_stopped() is False + + event.stop() + assert event.is_stopped() is True + + event.stop() + assert event.is_stopped() is True diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py new file mode 100644 index 00000000000..4cf68b58f0d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py @@ -0,0 +1,174 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import operator +import inspect + +import pytest + +from ftrack_api.event.expression import ( + Expression, All, Any, Not, Condition, Parser +) +from ftrack_api.exception import ParseError + + +@pytest.fixture() +def candidate(): + '''Return common candidate to test expressions against.''' + return { + 'id': 10, + 'name': 'value', + 'change': { + 'name': 'value', + 'new_value': 10 + } + } + + +@pytest.mark.parametrize('expression, expected', [ + pytest.mark.xfail(('', Expression())), + ('invalid', ParseError), + ('key=value nor other=value', ParseError), + ('key=value', Condition('key', operator.eq, 'value')), + ('key="value"', Condition('key', operator.eq, 'value')), + ( + 'a=b and ((c=d or e!=f) and not g.h > 10)', + All([ + Condition('a', operator.eq, 'b'), + All([ + Any([ + Condition('c', operator.eq, 'd'), + Condition('e', operator.ne, 'f') + ]), + Not( + Condition('g.h', operator.gt, 10) + ) + ]) + ]) + ) +], ids=[ + 'empty expression', + 'invalid expression', + 'invalid conjunction', + 'basic condition', + 'basic quoted condition', + 'complex condition' +]) +def test_parser_parse(expression, expected): + '''Parse expression into Expression instances.''' + parser = Parser() + + if inspect.isclass(expected)and issubclass(expected, Exception): + with pytest.raises(expected): + parser.parse(expression) + else: + assert str(parser.parse(expression)) == str(expected) + + +@pytest.mark.parametrize('expression, expected', [ + (Expression(), ''), + (All([Expression(), Expression()]), ' ]>'), + (Any([Expression(), Expression()]), ' ]>'), + (Not(Expression()), '>'), + (Condition('key', '=', 'value'), '') 
+], ids=[ + 'Expression', + 'All', + 'Any', + 'Not', + 'Condition' +]) +def test_string_representation(expression, expected): + '''String representation of expression.''' + assert str(expression) == expected + + +@pytest.mark.parametrize('expression, expected', [ + # Expression + (Expression(), True), + + # All + (All(), True), + (All([Expression(), Expression()]), True), + (All([Expression(), Condition('test', operator.eq, 'value')]), False), + + # Any + (Any(), False), + (Any([Expression(), Condition('test', operator.eq, 'value')]), True), + (Any([ + Condition('test', operator.eq, 'value'), + Condition('other', operator.eq, 'value') + ]), False), + + # Not + (Not(Expression()), False), + (Not(Not(Expression())), True) +], ids=[ + 'Expression-always matches', + + 'All-no expressions always matches', + 'All-all match', + 'All-not all match', + + 'Any-no expressions never matches', + 'Any-some match', + 'Any-none match', + + 'Not-invert positive match', + 'Not-double negative is positive match' +]) +def test_match(expression, candidate, expected): + '''Determine if candidate matches expression.''' + assert expression.match(candidate) is expected + + +def parametrize_test_condition_match(metafunc): + '''Parametrize condition_match tests.''' + identifiers = [] + data = [] + + matrix = { + # Operator, match, no match + operator.eq: { + 'match': 10, 'no-match': 20, + 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' + }, + operator.ne: {'match': 20, 'no-match': 10}, + operator.ge: {'match': 10, 'no-match': 20}, + operator.le: {'match': 10, 'no-match': 0}, + operator.gt: {'match': 0, 'no-match': 10}, + operator.lt: {'match': 20, 'no-match': 10} + } + + for operator_function, values in matrix.items(): + for value_label, value in values.items(): + if value_label.startswith('wildcard'): + key_options = { + 'plain': 'name', + 'nested': 'change.name' + } + else: + key_options = { + 'plain': 'id', + 'nested': 'change.new_value' + } + + for key_label, key in 
key_options.items(): + identifiers.append('{} operator {} key {}'.format( + operator_function.__name__, key_label, value_label + )) + + data.append(( + key, operator_function, value, + 'no-match' not in value_label + )) + + metafunc.parametrize( + 'key, operator, value, expected', data, ids=identifiers + ) + + +def test_condition_match(key, operator, value, candidate, expected): + '''Determine if candidate matches condition expression.''' + condition = Condition(key, operator, value) + assert condition.match(candidate) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py new file mode 100644 index 00000000000..6f1920dddf4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py @@ -0,0 +1,701 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect +import json +import os +import time +import subprocess +import sys + +import pytest + +import ftrack_api.event.hub +import ftrack_api.event.subscriber +from ftrack_api.event.base import Event +import ftrack_api.exception + + +class MockClass(object): + '''Mock class for testing.''' + + def method(self): + '''Mock method for testing.''' + + +def mockFunction(): + '''Mock function for testing.''' + + +class MockConnection(object): + '''Mock connection for testing.''' + + @property + def connected(self): + '''Return whether connected.''' + return True + + def close(self): + '''Close mock connection.''' + pass + + +def assert_callbacks(hub, callbacks): + '''Assert hub has exactly *callbacks* subscribed.''' + # Subscribers always starts with internal handle_reply subscriber. 
+ subscribers = hub._subscribers[:] + subscribers.pop(0) + + if len(subscribers) != len(callbacks): + raise AssertionError( + 'Number of subscribers ({0}) != number of callbacks ({1})' + .format(len(subscribers), len(callbacks)) + ) + + for index, subscriber in enumerate(subscribers): + if subscriber.callback != callbacks[index]: + raise AssertionError( + 'Callback at {0} != subscriber callback at same index.' + .format(index) + ) + + +@pytest.fixture() +def event_hub(request, session): + '''Return event hub to test against. + + Hub is automatically connected at start of test and disconnected at end. + + ''' + hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + hub.connect() + + def cleanup(): + '''Cleanup.''' + if hub.connected: + hub.disconnect() + + request.addfinalizer(cleanup) + + return hub + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000') +], ids=[ + 'with port', + 'without port' +]) +def test_get_server_url(server_url, expected): + '''Return server url.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_server_url() == expected + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000') +], ids=[ + 'with port', + 'without port' +]) +def test_get_network_location(server_url, expected): + '''Return network location of server url.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_network_location() == expected + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', True), + ('http://test.ftrackapp.com', False) +], ids=[ + 'secure', + 'not secure' +]) +def test_secure_property(server_url, expected, mocker): + '''Return 
whether secure connection used.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.secure is expected + + +def test_connected_property(session): + '''Return connected state.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + assert event_hub.connected is False + + event_hub.connect() + assert event_hub.connected is True + + event_hub.disconnect() + assert event_hub.connected is False + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000'), + ('test.ftrackapp.com', ValueError), + ('https://:9000', ValueError), +], ids=[ + 'with port', + 'without port', + 'missing scheme', + 'missing hostname' +]) +def test_initialise_against_server_url(server_url, expected): + '''Initialise against server url.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + else: + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_server_url() == expected + + +def test_connect(session): + '''Connect.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.connect() + + assert event_hub.connected is True + event_hub.disconnect() + + +def test_connect_when_already_connected(event_hub): + '''Fail to connect when already connected''' + assert event_hub.connected is True + + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.connect() + + assert 'Already connected' in str(error) + + +def test_connect_failure(session, mocker): + '''Fail to connect to server.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + def force_fail(*args, **kwargs): 
+ '''Force connection failure.''' + raise Exception('Forced fail.') + + mocker.patch('websocket.create_connection', force_fail) + with pytest.raises(ftrack_api.exception.EventHubConnectionError): + event_hub.connect() + + +def test_connect_missing_required_transport(session, mocker, caplog): + '''Fail to connect to server that does not provide correct transport.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + original_get_socket_io_session = event_hub._get_socket_io_session + + def _get_socket_io_session(): + '''Patched to return no transports.''' + session = original_get_socket_io_session() + return ftrack_api.event.hub.SocketIoSession( + session[0], session[1], [] + ) + + mocker.patch.object( + event_hub, '_get_socket_io_session', _get_socket_io_session + ) + + with pytest.raises(ftrack_api.exception.EventHubConnectionError): + event_hub.connect() + + logs = caplog.records() + assert ( + 'Server does not support websocket sessions.' 
in str(logs[-1].exc_info) + ) + + +def test_disconnect(event_hub): + '''Disconnect and unsubscribe all subscribers.''' + event_hub.disconnect() + assert len(event_hub._subscribers) == 0 + assert event_hub.connected is False + + +def test_disconnect_without_unsubscribing(event_hub): + '''Disconnect without unsubscribing all subscribers.''' + event_hub.disconnect(unsubscribe=False) + assert len(event_hub._subscribers) > 0 + assert event_hub.connected is False + + +def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): + '''Close connection from manually connected hub.''' + session_no_autoconnect_hub.event_hub.connect() + session_no_autoconnect_hub.close() + assert session_no_autoconnect_hub.event_hub.connected is False + + +def test_disconnect_when_not_connected(session): + '''Fail to disconnect when not connected''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.disconnect() + + assert 'Not currently connected' in str(error) + + +def test_reconnect(event_hub): + '''Reconnect successfully.''' + assert event_hub.connected is True + event_hub.reconnect() + assert event_hub.connected is True + + +def test_reconnect_when_not_connected(session): + '''Reconnect successfully even if not already connected.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + assert event_hub.connected is False + + event_hub.reconnect() + assert event_hub.connected is True + + event_hub.disconnect() + + +def test_fail_to_reconnect(session, mocker): + '''Fail to reconnect.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.connect() + assert event_hub.connected is True + + def force_fail(*args, **kwargs): + '''Force connection failure.''' + raise Exception('Forced fail.') + + 
mocker.patch('websocket.create_connection', force_fail) + + attempts = 2 + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.reconnect(attempts=attempts, delay=0.5) + + assert 'Failed to reconnect to event server' in str(error) + assert 'after {} attempts'.format(attempts) in str(error) + + +def test_wait(event_hub): + '''Wait for event and handle as they arrive.''' + called = {'callback': False} + + def callback(event): + called['callback'] = True + + event_hub.subscribe('topic=test-subscribe', callback) + + event_hub.publish(Event(topic='test-subscribe')) + + # Until wait, the event should not have been processed even if received. + time.sleep(1) + assert called == {'callback': False} + + event_hub.wait(2) + assert called == {'callback': True} + + +def test_wait_interrupted_by_disconnect(event_hub): + '''Interrupt wait loop with disconnect event.''' + wait_time = 5 + start = time.time() + + # Inject event directly for test purposes. + event = Event(topic='ftrack.meta.disconnected') + event_hub._event_queue.put(event) + + event_hub.wait(wait_time) + + assert time.time() - start < wait_time + + +@pytest.mark.parametrize('identifier, registered', [ + ('registered-test-subscriber', True), + ('unregistered-test-subscriber', False) +], ids=[ + 'registered', + 'missing' +]) +def test_get_subscriber_by_identifier(event_hub, identifier, registered): + '''Return subscriber by identifier.''' + def callback(event): + pass + + subscriber = { + 'id': 'registered-test-subscriber' + } + + event_hub.subscribe('topic=test-subscribe', callback, subscriber) + retrieved = event_hub.get_subscriber_by_identifier(identifier) + + if registered: + assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) + assert retrieved.metadata.get('id') == subscriber['id'] + else: + assert retrieved is None + + +def test_subscribe(event_hub): + '''Subscribe to topics.''' + called = {'a': False, 'b': False} + + def callback_a(event): + called['a'] = True 
+ + def callback_b(event): + called['b'] = True + + event_hub.subscribe('topic=test-subscribe', callback_a) + event_hub.subscribe('topic=test-subscribe-other', callback_b) + + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + + assert called == {'a': True, 'b': False} + + +def test_subscribe_before_connected(session): + '''Subscribe to topic before connected.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + called = {'callback': False} + + def callback(event): + called['callback'] = True + + identifier = 'test-subscriber' + event_hub.subscribe( + 'topic=test-subscribe', callback, subscriber={'id': identifier} + ) + assert event_hub.get_subscriber_by_identifier(identifier) is not None + + event_hub.connect() + + try: + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + finally: + event_hub.disconnect() + + assert called == {'callback': True} + + +def test_duplicate_subscriber(event_hub): + '''Fail to subscribe same subscriber more than once.''' + subscriber = {'id': 'test-subscriber'} + event_hub.subscribe('topic=test', None, subscriber=subscriber) + + with pytest.raises(ftrack_api.exception.NotUniqueError) as error: + event_hub.subscribe('topic=test', None, subscriber=subscriber) + + assert '{0} already exists'.format(subscriber['id']) in str(error) + + +def test_unsubscribe(event_hub): + '''Unsubscribe a specific callback.''' + def callback_a(event): + pass + + def callback_b(event): + pass + + identifier_a = event_hub.subscribe('topic=test', callback_a) + identifier_b = event_hub.subscribe('topic=test', callback_b) + + assert_callbacks(event_hub, [callback_a, callback_b]) + + event_hub.unsubscribe(identifier_a) + + # Unsubscribe requires confirmation event so wait here to give event a + # chance to process. 
+ time.sleep(5) + + assert_callbacks(event_hub, [callback_b]) + + +def test_unsubscribe_whilst_disconnected(event_hub): + '''Unsubscribe whilst disconnected.''' + identifier = event_hub.subscribe('topic=test', None) + event_hub.disconnect(unsubscribe=False) + + event_hub.unsubscribe(identifier) + assert_callbacks(event_hub, []) + + +def test_unsubscribe_missing_subscriber(event_hub): + '''Fail to unsubscribe a non-subscribed subscriber.''' + identifier = 'non-subscribed-subscriber' + with pytest.raises(ftrack_api.exception.NotFoundError) as error: + event_hub.unsubscribe(identifier) + + assert ( + 'missing subscriber with identifier {}'.format(identifier) + in str(error) + ) + + +@pytest.mark.parametrize('event_data', [ + dict(source=dict(id='1', user=dict(username='auto'))), + dict(source=dict(user=dict(username='auto'))), + dict(source=dict(id='1')), + dict() +], ids=[ + 'pre-prepared', + 'missing id', + 'missing user', + 'no source' +]) +def test_prepare_event(session, event_data): + '''Prepare event.''' + # Replace username `auto` in event data with API user. 
+ try: + if event_data['source']['user']['username'] == 'auto': + event_data['source']['user']['username'] = session.api_user + except KeyError: + pass + + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.id = '1' + + event = Event('test', id='event-id', **event_data) + expected = Event( + 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) + ) + event_hub._prepare_event(event) + assert event == expected + + +def test_prepare_reply_event(session): + '''Prepare reply event.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + source_event = Event('source', source=dict(id='source-id')) + reply_event = Event('reply') + + event_hub._prepare_reply_event(reply_event, source_event) + assert source_event['source']['id'] in reply_event['target'] + assert reply_event['in_reply_to_event'] == source_event['id'] + + event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) + assert reply_event['source'] == {'id': 'source'} + + +def test_publish(event_hub): + '''Publish asynchronous event.''' + called = {'callback': False} + + def callback(event): + called['callback'] = True + + event_hub.subscribe('topic=test-subscribe', callback) + + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + + assert called == {'callback': True} + + +def test_publish_raising_error(event_hub): + '''Raise error, when configured, on failed publish.''' + # Note that the event hub currently only fails publish when not connected. + # All other errors are inconsistently swallowed. 
+ event_hub.disconnect() + event = Event(topic='a-topic', data=dict(status='fail')) + + with pytest.raises(Exception): + event_hub.publish(event, on_error='raise') + + +def test_publish_ignoring_error(event_hub): + '''Ignore error, when configured, on failed publish.''' + # Note that the event hub currently only fails publish when not connected. + # All other errors are inconsistently swallowed. + event_hub.disconnect() + event = Event(topic='a-topic', data=dict(status='fail')) + event_hub.publish(event, on_error='ignore') + + +def test_publish_logs_other_errors(event_hub, caplog, mocker): + '''Log publish errors other than connection error.''' + # Mock connection to force error. + mocker.patch.object(event_hub, '_connection', MockConnection()) + + event = Event(topic='a-topic', data=dict(status='fail')) + event_hub.publish(event) + + expected = 'Error sending event {0}.'.format(event) + messages = [record.getMessage().strip() for record in caplog.records()] + assert expected in messages, 'Expected log message missing in output.' 
+ + +def test_synchronous_publish(event_hub): + '''Publish event synchronously and collect results.''' + def callback_a(event): + return 'A' + + def callback_b(event): + return 'B' + + def callback_c(event): + return 'C' + + event_hub.subscribe('topic=test', callback_a, priority=50) + event_hub.subscribe('topic=test', callback_b, priority=60) + event_hub.subscribe('topic=test', callback_c, priority=70) + + results = event_hub.publish(Event(topic='test'), synchronous=True) + assert results == ['A', 'B', 'C'] + + +def test_publish_with_reply(event_hub): + '''Publish asynchronous event with on reply handler.''' + + def replier(event): + '''Replier.''' + return 'Replied' + + event_hub.subscribe('topic=test', replier) + + called = {'callback': None} + + def on_reply(event): + called['callback'] = event['data'] + + event_hub.publish(Event(topic='test'), on_reply=on_reply) + event_hub.wait(2) + + assert called['callback'] == 'Replied' + + +def test_publish_with_multiple_replies(event_hub): + '''Publish asynchronous event and retrieve multiple replies.''' + + def replier_one(event): + '''Replier.''' + return 'One' + + def replier_two(event): + '''Replier.''' + return 'Two' + + event_hub.subscribe('topic=test', replier_one) + event_hub.subscribe('topic=test', replier_two) + + called = {'callback': []} + + def on_reply(event): + called['callback'].append(event['data']) + + event_hub.publish(Event(topic='test'), on_reply=on_reply) + event_hub.wait(2) + + assert sorted(called['callback']) == ['One', 'Two'] + + +@pytest.mark.slow +def test_server_heartbeat_response(): + '''Maintain connection by responding to server heartbeat request.''' + test_script = os.path.join( + os.path.dirname(__file__), 'event_hub_server_heartbeat.py' + ) + + # Start subscriber that will listen for all three messages. + subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe']) + + # Give subscriber time to connect to server. 
+ time.sleep(10) + + # Start publisher to publish three messages. + publisher = subprocess.Popen([sys.executable, test_script, 'publish']) + + publisher.wait() + subscriber.wait() + + assert subscriber.returncode == 0 + + +def test_stop_event(event_hub): + '''Stop processing of subsequent local handlers when stop flag set.''' + called = { + 'a': False, + 'b': False, + 'c': False + } + + def callback_a(event): + called['a'] = True + + def callback_b(event): + called['b'] = True + event.stop() + + def callback_c(event): + called['c'] = True + + event_hub.subscribe('topic=test', callback_a, priority=50) + event_hub.subscribe('topic=test', callback_b, priority=60) + event_hub.subscribe('topic=test', callback_c, priority=70) + + event_hub.publish(Event(topic='test')) + event_hub.wait(2) + + assert called == { + 'a': True, + 'b': True, + 'c': False + } + + +def test_encode(session): + '''Encode event data.''' + encoded = session.event_hub._encode( + dict(name='ftrack.event', args=[Event('test')]) + ) + assert 'inReplyToEvent' in encoded + assert 'in_reply_to_event' not in encoded + + +def test_decode(session): + '''Decode event data.''' + decoded = session.event_hub._decode( + json.dumps({ + 'inReplyToEvent': 'id' + }) + ) + + assert 'in_reply_to_event' in decoded + assert 'inReplyToEvent' not in decoded diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py new file mode 100644 index 00000000000..dc8ac69fd92 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py @@ -0,0 +1,33 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.event.subscriber +from ftrack_api.event.base import Event + + +def test_string_representation(): + '''String representation.''' + subscriber = 
ftrack_api.event.subscriber.Subscriber( + 'topic=test', lambda x: None, {'meta': 'info'}, 100 + ) + + assert str(subscriber) == ( + '' + ) + + +@pytest.mark.parametrize('expression, event, expected', [ + ('topic=test', Event(topic='test'), True), + ('topic=test', Event(topic='other-test'), False) +], ids=[ + 'interested', + 'not interested' +]) +def test_interested_in(expression, event, expected): + '''Determine if subscriber interested in event.''' + subscriber = ftrack_api.event.subscriber.Subscriber( + expression, lambda x: None, {'meta': 'info'}, 100 + ) + assert subscriber.interested_in(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py new file mode 100644 index 00000000000..1535309f257 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py @@ -0,0 +1,28 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.event.subscription +from ftrack_api.event.base import Event + + +def test_string_representation(): + '''String representation is subscription expression.''' + expression = 'topic=some-topic' + subscription = ftrack_api.event.subscription.Subscription(expression) + + assert str(subscription) == expression + + +@pytest.mark.parametrize('expression, event, expected', [ + ('topic=test', Event(topic='test'), True), + ('topic=test', Event(topic='other-test'), False) +], ids=[ + 'match', + 'no match' +]) +def test_includes(expression, event, expected): + '''Subscription includes event.''' + subscription = ftrack_api.event.subscription.Subscription(expression) + assert subscription.includes(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py new file mode 100644 index 00000000000..bc98f15de24 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py new file mode 100644 index 00000000000..51c896f96ba --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py @@ -0,0 +1,36 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.resource_identifier_transformer.base as _transformer + + +@pytest.fixture() +def transformer(session): + '''Return instance of ResourceIdentifierTransformer.''' + return _transformer.ResourceIdentifierTransformer(session) + + +@pytest.mark.parametrize('resource_identifier, context, expected', [ + ('identifier', None, 'identifier'), + ('identifier', {'user': {'username': 'user'}}, 'identifier') +], ids=[ + 'no context', + 'basic context' +]) +def test_encode(transformer, resource_identifier, context, expected): + '''Encode resource identifier.''' + assert transformer.encode(resource_identifier, context) == expected + + +@pytest.mark.parametrize('resource_identifier, context, expected', [ + ('identifier', None, 'identifier'), + ('identifier', {'user': {'username': 'user'}}, 'identifier') +], ids=[ + 'no context', + 'basic context' +]) +def test_decode(transformer, resource_identifier, context, expected): + '''Encode resource identifier.''' + assert transformer.decode(resource_identifier, context) 
== expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py new file mode 100644 index 00000000000..bc98f15de24 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py new file mode 100644 index 00000000000..dbf91ead208 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py @@ -0,0 +1,31 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.structure.base + + +class Concrete(ftrack_api.structure.base.Structure): + '''Concrete implementation to allow testing non-abstract methods.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. 
+ + ''' + return 'resource_identifier' + + +@pytest.mark.parametrize('sequence, expected', [ + ({'padding': None}, '%d'), + ({'padding': 4}, '%04d') +], ids=[ + 'no padding', + 'padded' +]) +def test_get_sequence_expression(sequence, expected): + '''Get sequence expression from sequence.''' + structure = Concrete() + assert structure._get_sequence_expression(sequence) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py new file mode 100644 index 00000000000..01ccb35ac85 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py @@ -0,0 +1,49 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest +import mock + +import ftrack_api +import ftrack_api.structure.entity_id + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.entity_id.EntityIdStructure() + + +# Note: When it is possible to use indirect=True on just a few arguments, the +# called functions here can change to standard fixtures. 
+# https://github.com/pytest-dev/pytest/issues/579 + +def valid_entity(): + '''Return valid entity.''' + session = ftrack_api.Session() + + entity = session.create('FileComponent', { + 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', + 'name': 'file_component', + 'file_type': '.png' + }) + + return entity + + +@pytest.mark.parametrize('entity, context, expected', [ + (valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724'), + (mock.Mock(), {}, Exception) +], ids=[ + 'valid-entity', + 'non-entity' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py new file mode 100644 index 00000000000..ef81da2d65d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py @@ -0,0 +1,115 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest + +import ftrack_api +import ftrack_api.structure.id + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.id.IdStructure(prefix='path') + + +# Note: When it is possible to use indirect=True on just a few arguments, the +# called functions here can change to standard fixtures. 
+# https://github.com/pytest-dev/pytest/issues/579 + +def file_component(container=None): + '''Return file component.''' + session = ftrack_api.Session() + + entity = session.create('FileComponent', { + 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', + 'name': '0001', + 'file_type': '.png', + 'container': container + }) + + return entity + + +def sequence_component(padding=0): + '''Return sequence component with *padding*.''' + session = ftrack_api.Session() + + entity = session.create('SequenceComponent', { + 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', + 'name': 'sequence_component', + 'file_type': '.png', + 'padding': padding + }) + + return entity + + +def container_component(): + '''Return container component.''' + session = ftrack_api.Session() + + entity = session.create('ContainerComponent', { + 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', + 'name': 'container_component' + }) + + return entity + + +def unsupported_entity(): + '''Return an unsupported entity.''' + session = ftrack_api.Session() + + entity = session.create('User', { + 'username': 'martin' + }) + + return entity + + +@pytest.mark.parametrize('entity, context, expected', [ + ( + file_component(), {}, + 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' + ), + ( + file_component(container_component()), {}, + 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' + 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png' + ), + ( + file_component(sequence_component()), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png' + ), + ( + sequence_component(padding=0), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png' + ), + ( + sequence_component(padding=4), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png' + ), + ( + container_component(), {}, + 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a' + ), + (unsupported_entity(), {}, NotImplementedError) +], ids=[ + 'file-component', + 'file-component-in-container', + 'file-component-in-sequence', + 'unpadded-sequence-component', 
+ 'padded-sequence-component', + 'container-component', + 'unsupported-entity' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py new file mode 100644 index 00000000000..e294e04a70a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py @@ -0,0 +1,33 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest +import mock + +import ftrack_api.structure.origin + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.origin.OriginStructure() + + +@pytest.mark.parametrize('entity, context, expected', [ + (mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier'), + (mock.Mock(), {}, ValueError), + (mock.Mock(), None, ValueError) +], ids=[ + 'valid-context', + 'invalid-context', + 'unspecified-context' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py new file 
# --- embedded file: test/unit/structure/test_standard.py ---
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack

import uuid

import pytest

import ftrack_api
import ftrack_api.structure.standard


@pytest.fixture(scope='session')
def new_project(request):
    '''Return new empty project.'''
    session = ftrack_api.Session()

    project_schema = session.query('ProjectSchema').first()
    project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex)
    project = session.create('Project', {
        'name': project_name,
        'full_name': project_name + '_full',
        'project_schema': project_schema
    })

    session.commit()

    def cleanup():
        '''Remove created entity.'''
        session.delete(project)
        session.commit()

    request.addfinalizer(cleanup)

    return project


def new_container_component():
    '''Return container component.'''
    session = ftrack_api.Session()

    entity = session.create('ContainerComponent', {
        'name': 'container_component'
    })

    return entity


def new_sequence_component():
    '''Return sequence component.'''
    session = ftrack_api.Session()

    entity = session.create_component(
        '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'}
    )

    return entity


def new_file_component(name='foo', container=None):
    '''Return file component with *name* and *container*.'''
    if container:
        session = container.session
    else:
        session = ftrack_api.Session()

    entity = session.create('FileComponent', {
        'name': name,
        'file_type': '.png',
        'container': container
    })

    return entity


# Reusable fixtures.
file_component = new_file_component()
container_component = new_container_component()
sequence_component = new_sequence_component()


# Note: to improve test performance the same project is reused throughout the
# tests. This means that all hierarchical names must be unique, otherwise an
# IntegrityError will be raised on the server.

@pytest.mark.parametrize(
    'component, hierarchy, expected, structure, asset_name',
    [
        (
            file_component,
            [],
            '{project_name}/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            file_component,
            [],
            '{project_name}/foobar/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(
                project_versions_prefix='foobar'
            ),
            'my_new_asset'
        ),
        (
            file_component,
            ['baz1', 'bar'],
            '{project_name}/baz1/bar/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            sequence_component,
            ['baz2', 'bar'],
            '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            sequence_component['members'][3],
            ['baz3', 'bar'],
            '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            container_component,
            ['baz4', 'bar'],
            '{project_name}/baz4/bar/my_new_asset/v001/container_component',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            new_file_component(container=container_component),
            ['baz5', 'bar'],
            (
                '{project_name}/baz5/bar/my_new_asset/v001/container_component/'
                'foo.png'
            ),
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            file_component,
            [u'björn'],
            '{project_name}/bjorn/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            file_component,
            [u'björn!'],
            '{project_name}/bjorn_/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            new_file_component(name=u'fää'),
            [],
            '{project_name}/my_new_asset/v001/faa.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            new_file_component(name=u'fo/o'),
            [],
            '{project_name}/my_new_asset/v001/fo_o.png',
            ftrack_api.structure.standard.StandardStructure(),
            'my_new_asset'
        ),
        (
            file_component,
            [],
            '{project_name}/aao/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            u'åäö'
        ),
        (
            file_component,
            [],
            '{project_name}/my_ne____w_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(),
            u'my_ne!!!!w_asset'
        ),
        (
            file_component,
            [u'björn2'],
            u'{project_name}/björn2/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(
                illegal_character_substitute=None
            ),
            'my_new_asset'
        ),
        (
            file_component,
            [u'bj!rn'],
            '{project_name}/bj^rn/my_new_asset/v001/foo.png',
            ftrack_api.structure.standard.StandardStructure(
                illegal_character_substitute='^'
            ),
            'my_new_asset'
        )
    ], ids=[
        'file_component_on_project',
        'file_component_on_project_with_prefix',
        'file_component_with_hierarchy',
        'sequence_component',
        'sequence_component_member',
        'container_component',
        'container_component_member',
        'slugify_non_ascii_hierarchy',
        'slugify_illegal_hierarchy',
        'slugify_non_ascii_component_name',
        'slugify_illegal_component_name',
        'slugify_non_ascii_asset_name',
        'slugify_illegal_asset_name',
        'slugify_none',
        'slugify_other_character'
    ]
)
def test_get_resource_identifier(
    component, hierarchy, expected, structure, asset_name, new_project
):
    '''Get resource identifier.'''
    session = component.session

    # Create structure, asset and version.
    context_id = new_project['id']
    for name in hierarchy:
        context_id = session.create('Folder', {
            'name': name,
            'project_id': new_project['id'],
            'parent_id': context_id
        })['id']

    asset = session.create(
        'Asset', {'name': asset_name, 'context_id': context_id}
    )
    version = session.create('AssetVersion', {'asset': asset})

    # Update component with version.
    if component['container']:
        component['container']['version'] = version
    else:
        component['version'] = version

    session.commit()

    assert structure.get_resource_identifier(component) == expected.format(
        project_name=new_project['name']
    )


def test_unsupported_entity(user):
    '''Fail to get resource identifier for unsupported entity.'''
    structure = ftrack_api.structure.standard.StandardStructure()
    with pytest.raises(NotImplementedError):
        structure.get_resource_identifier(user)


def test_component_without_version_relation(new_project):
    '''Get an identifier for component without a version relation.'''
    session = new_project.session

    asset = session.create(
        'Asset', {'name': 'foo', 'context_id': new_project['id']}
    )
    version = session.create('AssetVersion', {'asset': asset})

    session.commit()

    file_component = new_file_component()
    file_component['version_id'] = version['id']

    structure = ftrack_api.structure.standard.StandardStructure()
    structure.get_resource_identifier(file_component)


def test_component_without_committed_version_relation():
    '''Fail to get an identifier for component without a committed version.'''
    file_component = new_file_component()
    session = file_component.session
    version = session.create('AssetVersion', {})

    file_component['version'] = version

    structure = ftrack_api.structure.standard.StandardStructure()

    with pytest.raises(ftrack_api.exception.StructureError):
        structure.get_resource_identifier(file_component)


@pytest.mark.xfail(
    raises=ftrack_api.exception.ServerError,
    reason='Due to user permission errors.'
)
def test_component_without_committed_asset_relation():
    '''Fail to get an identifier for component without a committed asset.'''
    file_component = new_file_component()
    session = file_component.session
    version = session.create('AssetVersion', {})

    file_component['version'] = version

    session.commit()

    structure = ftrack_api.structure.standard.StandardStructure()

    with pytest.raises(ftrack_api.exception.StructureError):
        structure.get_resource_identifier(file_component)


# --- embedded file: test/unit/test_attribute.py ---
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack

import pytest

import ftrack_api.attribute
import ftrack_api.exception


@pytest.mark.parametrize('attributes', [
    [],
    [ftrack_api.attribute.Attribute('test')]
], ids=[
    'no initial attributes',
    'with initial attributes'
])
def test_initialise_attributes_collection(attributes):
    '''Initialise attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes(attributes)
    assert sorted(list(attribute_collection)) == sorted(attributes)


def test_add_attribute_to_attributes_collection():
    '''Add valid attribute to attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    attribute = ftrack_api.attribute.Attribute('test')

    assert attribute_collection.keys() == []
    attribute_collection.add(attribute)
    assert attribute_collection.keys() == ['test']
def test_add_duplicate_attribute_to_attributes_collection():
    '''Fail to add attribute with duplicate name to attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    attribute = ftrack_api.attribute.Attribute('test')

    attribute_collection.add(attribute)
    with pytest.raises(ftrack_api.exception.NotUniqueError):
        attribute_collection.add(attribute)


def test_remove_attribute_from_attributes_collection():
    '''Remove attribute from attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    attribute = ftrack_api.attribute.Attribute('test')

    attribute_collection.add(attribute)
    assert len(attribute_collection) == 1

    attribute_collection.remove(attribute)
    assert len(attribute_collection) == 0


def test_remove_missing_attribute_from_attributes_collection():
    '''Fail to remove attribute not present in attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    attribute = ftrack_api.attribute.Attribute('test')

    with pytest.raises(KeyError):
        attribute_collection.remove(attribute)


def test_get_attribute_from_attributes_collection():
    '''Get attribute from attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    attribute = ftrack_api.attribute.Attribute('test')
    attribute_collection.add(attribute)

    retrieved_attribute = attribute_collection.get('test')

    assert retrieved_attribute is attribute


def test_get_missing_attribute_from_attributes_collection():
    '''Get attribute not present in attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes()
    assert attribute_collection.get('test') is None


@pytest.mark.parametrize('attributes, expected', [
    ([], []),
    ([ftrack_api.attribute.Attribute('test')], ['test'])
], ids=[
    'no initial attributes',
    'with initial attributes'
])
def test_attribute_collection_keys(attributes, expected):
    '''Retrieve keys for attribute collection.'''
    attribute_collection = ftrack_api.attribute.Attributes(attributes)
    assert sorted(attribute_collection.keys()) == sorted(expected)


@pytest.mark.parametrize('attribute, expected', [
    (None, False),
    (ftrack_api.attribute.Attribute('b'), True),
    (ftrack_api.attribute.Attribute('c'), False)
], ids=[
    'none attribute',
    'present attribute',
    'missing attribute'
])
def test_attributes_collection_contains(attribute, expected):
    '''Check presence in attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes([
        ftrack_api.attribute.Attribute('a'),
        ftrack_api.attribute.Attribute('b')
    ])

    assert (attribute in attribute_collection) is expected


@pytest.mark.parametrize('attributes, expected', [
    ([], 0),
    ([ftrack_api.attribute.Attribute('test')], 1),
    (
        [
            ftrack_api.attribute.Attribute('a'),
            ftrack_api.attribute.Attribute('b')
        ],
        2
    )
], ids=[
    'no attributes',
    'single attribute',
    'multiple attributes'
])
def test_attributes_collection_count(attributes, expected):
    '''Count attributes in attributes collection.'''
    attribute_collection = ftrack_api.attribute.Attributes(attributes)
    assert len(attribute_collection) == expected


def test_iterate_over_attributes_collection():
    '''Iterate over attributes collection.'''
    attributes = [
        ftrack_api.attribute.Attribute('a'),
        ftrack_api.attribute.Attribute('b')
    ]

    attribute_collection = ftrack_api.attribute.Attributes(attributes)
    for attribute in attribute_collection:
        attributes.remove(attribute)

    assert len(attributes) == 0


# --- embedded file: test/unit/test_cache.py ---
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack

import os
import uuid
import tempfile

import pytest

import ftrack_api.cache
@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 'serialised'])
def cache(request):
    '''Return cache.'''
    if request.param == 'proxy':
        cache = ftrack_api.cache.ProxyCache(
            ftrack_api.cache.MemoryCache()
        )

    elif request.param == 'layered':
        cache = ftrack_api.cache.LayeredCache(
            [ftrack_api.cache.MemoryCache()]
        )

    elif request.param == 'memory':
        cache = ftrack_api.cache.MemoryCache()

    elif request.param == 'file':
        cache_path = os.path.join(
            tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex)
        )

        cache = ftrack_api.cache.FileCache(cache_path)

        def cleanup():
            '''Cleanup.'''
            try:
                os.remove(cache_path)
            except OSError:
                # BSD DB (Mac OSX) implementation of the interface will append
                # a .db extension.
                os.remove(cache_path + '.db')

        request.addfinalizer(cleanup)

    elif request.param == 'serialised':
        cache = ftrack_api.cache.SerialisedCache(
            ftrack_api.cache.MemoryCache(),
            encode=lambda value: value,
            decode=lambda value: value
        )

    else:
        raise ValueError(
            'Unrecognised cache fixture type {0!r}'.format(request.param)
        )

    return cache


class Class(object):
    '''Class for testing.'''

    def method(self, key):
        '''Method for testing.'''


def function(mutable, x, y=2):
    '''Function for testing.'''
    mutable['called'] = True
    return {'result': x + y}


def assert_memoised_call(
    memoiser, function, expected, args=None, kw=None, memoised=True
):
    '''Assert *function* call via *memoiser* was *memoised*.'''
    mapping = {'called': False}
    if args is not None:
        args = (mapping,) + args
    else:
        args = (mapping,)

    result = memoiser.call(function, args, kw)

    assert result == expected
    assert mapping['called'] is not memoised


def test_get(cache):
    '''Retrieve item from cache.'''
    cache.set('key', 'value')
    assert cache.get('key') == 'value'


def test_get_missing_key(cache):
    '''Fail to retrieve missing item from cache.'''
    with pytest.raises(KeyError):
        cache.get('key')


def test_set(cache):
    '''Set item in cache.'''
    with pytest.raises(KeyError):
        cache.get('key')

    cache.set('key', 'value')
    assert cache.get('key') == 'value'


def test_remove(cache):
    '''Remove item from cache.'''
    cache.set('key', 'value')
    cache.remove('key')

    with pytest.raises(KeyError):
        cache.get('key')


def test_remove_missing_key(cache):
    '''Fail to remove missing key.'''
    with pytest.raises(KeyError):
        cache.remove('key')


def test_keys(cache):
    '''Retrieve keys of items in cache.'''
    assert cache.keys() == []
    cache.set('a', 'a_value')
    cache.set('b', 'b_value')
    cache.set('c', 'c_value')
    assert sorted(cache.keys()) == sorted(['a', 'b', 'c'])


def test_clear(cache):
    '''Remove items from cache.'''
    cache.set('a', 'a_value')
    cache.set('b', 'b_value')
    cache.set('c', 'c_value')

    assert cache.keys()
    cache.clear()

    assert not cache.keys()


def test_clear_using_pattern(cache):
    '''Remove items that match pattern from cache.'''
    cache.set('matching_key', 'value')
    cache.set('another_matching_key', 'value')
    cache.set('key_not_matching', 'value')

    assert cache.keys()
    cache.clear(pattern='.*matching_key$')

    assert cache.keys() == ['key_not_matching']


def test_clear_encountering_missing_key(cache, mocker):
    '''Clear missing key.'''
    # Force reporting keys that are not actually valid for test purposes.
    mocker.patch.object(cache, 'keys', lambda: ['missing'])
    assert cache.keys() == ['missing']

    # Should not error even though key not valid.
    cache.clear()

    # The key was not successfully removed so should still be present.
    assert cache.keys() == ['missing']


def test_layered_cache_propagates_value_on_get():
    '''Layered cache propagates value on get.'''
    caches = [
        ftrack_api.cache.MemoryCache(),
        ftrack_api.cache.MemoryCache(),
        ftrack_api.cache.MemoryCache()
    ]

    cache = ftrack_api.cache.LayeredCache(caches)

    # Set item on second level cache only.
    caches[1].set('key', 'value')

    # Retrieving key via layered cache should propagate it automatically to
    # higher level caches only.
    assert cache.get('key') == 'value'
    assert caches[0].get('key') == 'value'

    with pytest.raises(KeyError):
        caches[2].get('key')


def test_layered_cache_remove_at_depth():
    '''Remove key that only exists at depth in LayeredCache.'''
    caches = [
        ftrack_api.cache.MemoryCache(),
        ftrack_api.cache.MemoryCache()
    ]

    cache = ftrack_api.cache.LayeredCache(caches)

    # Set item on second level cache only.
    caches[1].set('key', 'value')

    # Removing key that only exists at depth should not raise key error.
    cache.remove('key')

    # Ensure key was removed.
    assert not cache.keys()


def test_expand_references():
    '''Test that references are expanded from serialized cache.'''

    cache_path = os.path.join(
        tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex)
    )

    def make_cache(session, cache_path):
        '''Create a serialised file cache.'''
        serialized_file_cache = ftrack_api.cache.SerialisedCache(
            ftrack_api.cache.FileCache(cache_path),
            encode=session.encode,
            decode=session.decode
        )

        return serialized_file_cache

    # Populate the serialized file cache.
    session = ftrack_api.Session(
        cache=lambda session, cache_path=cache_path: make_cache(
            session, cache_path
        )
    )

    expanded_results = dict()

    query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10'

    for sequence in session.query(query_string):
        asset = sequence.get('asset')

        expanded_results.setdefault(
            asset.get('id'), asset.get('parent')
        )

    # Fetch the data from cache.
    new_session = ftrack_api.Session(
        cache=lambda session, cache_path=cache_path: make_cache(
            session, cache_path
        )
    )

    new_session_two = ftrack_api.Session(
        cache=lambda session, cache_path=cache_path: make_cache(
            session, cache_path
        )
    )

    # Make sure references are merged.
    for sequence in new_session.query(query_string):
        asset = sequence.get('asset')

        assert (
            asset.get('parent') == expanded_results[asset.get('id')]
        )

        # Use for fetching directly using get.
        assert (
            new_session_two.get(asset.entity_type, asset.get('id')).get('parent') ==
            expanded_results[asset.get('id')]
        )


@pytest.mark.parametrize('items, key', [
    (({},), '{}'),
    (({}, {}), '{}{}')
], ids=[
    'single object',
    'multiple objects'
])
def test_string_key_maker_key(items, key):
    '''Generate key using string key maker.'''
    key_maker = ftrack_api.cache.StringKeyMaker()
    assert key_maker.key(*items) == key


@pytest.mark.parametrize('items, key', [
    (
        ({},),
        '\x01\x01'
    ),
    (
        ({'a': 'b'}, [1, 2]),
        '\x01'
        '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.'
        '\x01'
        '\x00'
        '\x03'
        '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.'
        '\x03'
    ),
    (
        (function,),
        '\x04function\x00unit.test_cache'
    ),
    (
        (Class,),
        '\x04Class\x00unit.test_cache'
    ),
    (
        (Class.method,),
        '\x04method\x00Class\x00unit.test_cache'
    ),
    (
        (callable,),
        '\x04callable'
    )
], ids=[
    'single mapping',
    'multiple objects',
    'function',
    'class',
    'method',
    'builtin'
])
def test_object_key_maker_key(items, key):
    '''Generate key using string key maker.'''
    key_maker = ftrack_api.cache.ObjectKeyMaker()
    assert key_maker.key(*items) == key


def test_memoised_call():
    '''Call memoised function.'''
    memoiser = ftrack_api.cache.Memoiser()

    # Initial call should not be memoised so function is executed.
    assert_memoised_call(
        memoiser, function, args=(1,), expected={'result': 3}, memoised=False
    )

    # Identical call should be memoised so function is not executed again.
    assert_memoised_call(
        memoiser, function, args=(1,), expected={'result': 3}, memoised=True
    )

    # Differing call is not memoised so function is executed.
    assert_memoised_call(
        memoiser, function, args=(3,), expected={'result': 5}, memoised=False
    )


def test_memoised_call_variations():
    '''Call memoised function with identical arguments using variable format.'''
    memoiser = ftrack_api.cache.Memoiser()
    expected = {'result': 3}

    # Call function once to ensure is memoised.
    assert_memoised_call(
        memoiser, function, args=(1,), expected=expected, memoised=False
    )

    # Each of the following calls should equate to the same key and make
    # use of the memoised value.
    for args, kw in [
        ((), {'x': 1}),
        ((), {'x': 1, 'y': 2}),
        ((1,), {'y': 2}),
        ((1,), {})
    ]:
        assert_memoised_call(
            memoiser, function, args=args, kw=kw, expected=expected
        )

    # The following calls should all be treated as new variations and so
    # not use any memoised value.
    assert_memoised_call(
        memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False
    )
    assert_memoised_call(
        memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5},
        memoised=False
    )
    assert_memoised_call(
        memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6},
        memoised=False
    )
    assert_memoised_call(
        memoiser, function, args=(5, ), expected={'result': 7}, memoised=False
    )


def test_memoised_mutable_return_value():
    '''Avoid side effects for returned mutable arguments when memoising.'''
    memoiser = ftrack_api.cache.Memoiser()
    arguments = ({'called': False}, 1)

    result_a = memoiser.call(function, arguments)
    assert result_a == {'result': 3}
    assert arguments[0]['called']

    # Modify mutable externally and check that stored memoised value is
    # unchanged.
    del result_a['result']

    arguments[0]['called'] = False
    result_b = memoiser.call(function, arguments)

    assert result_b == {'result': 3}
    assert not arguments[0]['called']


# --- embedded file: test/unit/test_collection.py ---
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack

import copy
import uuid

import mock
import pytest

import ftrack_api.collection
import ftrack_api.symbol
import ftrack_api.inspection
import ftrack_api.exception
import ftrack_api.operation


def create_mock_entity(session):
    '''Return new mock entity for *session*.'''
    entity = mock.MagicMock()
    entity.session = session
    entity.primary_key_attributes = ['id']
    entity['id'] = str(uuid.uuid4())
    return entity


@pytest.fixture
def mock_entity(session):
    '''Return mock entity.'''
    return create_mock_entity(session)


@pytest.fixture
def mock_entities(session):
    '''Return list of two mock entities.'''
    return [
        create_mock_entity(session),
        create_mock_entity(session)
    ]


@pytest.fixture
def mock_attribute():
    '''Return mock attribute.'''
    attribute = mock.MagicMock()
    attribute.name = 'test'
    return attribute


def test_collection_initialisation_does_not_modify_entity_state(
    mock_entity, mock_attribute, mock_entities
):
    '''Initialising collection does not modify entity state.'''
    ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET
def test_immutable_collection_initialisation(
    mock_entity, mock_attribute, mock_entities
):
    '''Initialise immutable collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities, mutable=False
    )

    assert list(collection) == mock_entities
    assert collection.mutable is False


def test_collection_shallow_copy(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Shallow copying collection should avoid indirect mutation.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    with mock_entity.session.operation_recording(False):
        collection_copy = copy.copy(collection)
        new_entity = create_mock_entity(session)
        collection_copy.append(new_entity)

    assert list(collection) == mock_entities
    assert list(collection_copy) == mock_entities + [new_entity]


def test_collection_insert(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Insert a value into collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    new_entity = create_mock_entity(session)
    collection.insert(0, new_entity)
    assert list(collection) == [new_entity] + mock_entities


def test_collection_insert_duplicate(
    mock_entity, mock_attribute, mock_entities
):
    '''Fail to insert a duplicate value into collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError):
        collection.insert(0, mock_entities[1])


def test_immutable_collection_insert(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Fail to insert a value into immutable collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities, mutable=False
    )

    with pytest.raises(ftrack_api.exception.ImmutableCollectionError):
        collection.insert(0, create_mock_entity(session))


def test_collection_set_item(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Set item at index in collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    new_entity = create_mock_entity(session)
    collection[0] = new_entity
    assert list(collection) == [new_entity, mock_entities[1]]


def test_collection_re_set_item(
    mock_entity, mock_attribute, mock_entities
):
    '''Re-set value at exact same index in collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    collection[0] = mock_entities[0]
    assert list(collection) == mock_entities


def test_collection_set_duplicate_item(
    mock_entity, mock_attribute, mock_entities
):
    '''Fail to set a duplicate value into collection at different index.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError):
        collection[0] = mock_entities[1]


def test_immutable_collection_set_item(
    mock_entity, mock_attribute, mock_entities
):
    '''Fail to set item at index in immutable collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities, mutable=False
    )

    with pytest.raises(ftrack_api.exception.ImmutableCollectionError):
        collection[0] = mock_entities[0]


def test_collection_delete_item(
    mock_entity, mock_attribute, mock_entities
):
    '''Remove item at index from collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )
    del collection[0]
    assert list(collection) == [mock_entities[1]]


def test_collection_delete_item_at_invalid_index(
    mock_entity, mock_attribute, mock_entities
):
    '''Fail to remove item at missing index from immutable collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )

    with pytest.raises(IndexError):
        del collection[4]


def test_immutable_collection_delete_item(
    mock_entity, mock_attribute, mock_entities
):
    '''Fail to remove item at index from immutable collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities, mutable=False
    )

    with pytest.raises(ftrack_api.exception.ImmutableCollectionError):
        del collection[0]


def test_collection_count(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Count items in collection.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )
    assert len(collection) == 2

    collection.append(create_mock_entity(session))
    assert len(collection) == 3

    del collection[0]
    assert len(collection) == 2


@pytest.mark.parametrize('other, expected', [
    ([], False),
    ([1, 2], True),
    ([1, 2, 3], False),
    ([1], False)
], ids=[
    'empty',
    'same',
    'additional',
    'missing'
])
def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected):
    '''Determine collection equality against another collection.'''
    # Temporarily override determination of entity identity so that it works
    # against simple scalar values for purpose of test.
    mocker.patch.object(
        ftrack_api.inspection, 'identity', lambda entity: str(entity)
    )

    collection_a = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=[1, 2]
    )

    collection_b = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=other
    )
    assert (collection_a == collection_b) is expected


def test_collection_not_equal_to_non_collection(
    mocker, mock_entity, mock_attribute
):
    '''Collection not equal to a non-collection.'''
    # Temporarily override determination of entity identity so that it works
    # against simple scalar values for purpose of test.
    mocker.patch.object(
        ftrack_api.inspection, 'identity', lambda entity: str(entity)
    )

    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=[1, 2]
    )

    assert (collection != {}) is True


def test_collection_notify_on_modification(
    mock_entity, mock_attribute, mock_entities, session
):
    '''Record UpdateEntityOperation on collection modification.'''
    collection = ftrack_api.collection.Collection(
        mock_entity, mock_attribute, data=mock_entities
    )
    assert len(session.recorded_operations) == 0

    collection.append(create_mock_entity(session))
    assert len(session.recorded_operations) == 1
    operation = session.recorded_operations.pop()
    assert isinstance(operation, ftrack_api.operation.UpdateEntityOperation)
    assert operation.new_value == collection


def test_mapped_collection_proxy_shallow_copy(new_project, unique_name):
    '''Shallow copying mapped collection proxy avoids indirect mutation.'''
    metadata = new_project['metadata']

    with new_project.session.operation_recording(False):
        metadata_copy = copy.copy(metadata)
        metadata_copy[unique_name] = True

    assert unique_name not in metadata
    assert unique_name in metadata_copy


def test_mapped_collection_proxy_mutable_property(new_project):
    '''Mapped collection mutable property maps to underlying collection.'''
    metadata = new_project['metadata']

    assert metadata.mutable is True
    assert metadata.collection.mutable is True

    metadata.mutable = False
    assert metadata.collection.mutable is False


def test_mapped_collection_proxy_attribute_property(
    new_project, mock_attribute
):
    '''Mapped collection attribute property maps to underlying collection.'''
    metadata = new_project['metadata']

    assert metadata.attribute is metadata.collection.attribute

    metadata.attribute = mock_attribute
    assert metadata.collection.attribute is mock_attribute


def test_mapped_collection_proxy_get_item(new_project, unique_name):
    '''Retrieve item in mapped collection proxy.'''
    session = new_project.session

    # Prepare data.
    metadata = new_project['metadata']
    value = 'value'
    metadata[unique_name] = value
    session.commit()

    # Check in clean session retrieval of value.
    session.reset()
    retrieved = session.get(*ftrack_api.inspection.identity(new_project))

    assert retrieved is not new_project
    assert retrieved['metadata'].keys() == [unique_name]
    assert retrieved['metadata'][unique_name] == value


def test_mapped_collection_proxy_set_item(new_project, unique_name):
    '''Set new item in mapped collection proxy.'''
    session = new_project.session

    metadata = new_project['metadata']
    assert unique_name not in metadata

    value = 'value'
    metadata[unique_name] = value
    assert metadata[unique_name] == value

    # Check change persisted correctly.
    session.commit()
    session.reset()
    retrieved = session.get(*ftrack_api.inspection.identity(new_project))

    assert retrieved is not new_project
    assert retrieved['metadata'].keys() == [unique_name]
    assert retrieved['metadata'][unique_name] == value


def test_mapped_collection_proxy_update_item(new_project, unique_name):
    '''Update existing item in mapped collection proxy.'''
    session = new_project.session

    # Prepare a pre-existing value.
    metadata = new_project['metadata']
    value = 'value'
    metadata[unique_name] = value
    session.commit()

    # Set new value.
    new_value = 'new_value'
    metadata[unique_name] = new_value

    # Confirm change persisted correctly.
    session.commit()
    session.reset()
    retrieved = session.get(*ftrack_api.inspection.identity(new_project))

    assert retrieved is not new_project
    assert retrieved['metadata'].keys() == [unique_name]
    assert retrieved['metadata'][unique_name] == new_value


def test_mapped_collection_proxy_delete_item(new_project, unique_name):
    '''Remove existing item from mapped collection proxy.'''
    session = new_project.session

    # Prepare a pre-existing value to remove.
    metadata = new_project['metadata']
    value = 'value'
    metadata[unique_name] = value
    session.commit()

    # Now remove value.
    del new_project['metadata'][unique_name]
    assert unique_name not in new_project['metadata']

    # Confirm change persisted correctly.
    session.commit()
    session.reset()
    retrieved = session.get(*ftrack_api.inspection.identity(new_project))

    assert retrieved is not new_project
    assert retrieved['metadata'].keys() == []
    assert unique_name not in retrieved['metadata']


def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name):
    '''Fail to remove item for missing key from mapped collection proxy.'''
    metadata = new_project['metadata']
    assert unique_name not in metadata
    with pytest.raises(KeyError):
        del metadata[unique_name]
+ new_project.session.commit() + + iterated = set() + for key in metadata: + iterated.add(key) + + assert iterated == set(['a', 'b', 'c']) + + +def test_mapped_collection_proxy_count(new_project, unique_name): + '''Count items in mapped collection proxy.''' + metadata = new_project['metadata'] + metadata.update({ + 'a': 'value-a', + 'b': 'value-b', + 'c': 'value-c' + }) + + # Commit here as otherwise cleanup operation will fail because transaction + # will include updating metadata to refer to a deleted entity. + new_project.session.commit() + + assert len(metadata) == 3 + + +def test_mapped_collection_on_create(session, unique_name, project): + '''Test that it is possible to set relational attributes on create''' + metadata = { + 'a': 'value-a', + 'b': 'value-b', + 'c': 'value-c' + } + + task_id = session.create( + 'Task', { + 'name': unique_name, + 'parent': project, + 'metadata': metadata, + + } + ).get('id') + + session.commit() + + # Reset the session and check that we have the expected + # values. + session.reset() + + task = session.get( + 'Task', task_id + ) + + for key, value in metadata.items(): + assert value == task['metadata'][key] + + +def test_collection_refresh(new_asset_version, new_component): + '''Test collection reload.''' + session_two = ftrack_api.Session(auto_connect_event_hub=False) + + query_string = 'select components from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + # Fetch the new asset version in a new session. + new_asset_version_two = session_two.query( + query_string + ).one() + + # Modify our asset version + new_asset_version.get('components').append( + new_component + ) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure we get the newly + # populated data. 
+ session_two.query( + query_string + ).all() + + assert ( + new_asset_version.get('components') == new_asset_version_two.get('components') + ) + + # Make a local change to our asset version + new_asset_version_two.get('components').pop() + + # Query the same asset version again and make sure our local changes + # are not overwritten. + + session_two.query( + query_string + ).all() + + assert len(new_asset_version_two.get('components')) == 0 + + +def test_mapped_collection_reload(new_asset_version): + '''Test mapped collection reload.''' + session_two = ftrack_api.Session(auto_connect_event_hub=False) + + query_string = 'select metadata from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + # Fetch the new asset version in a new session. + new_asset_version_two = session_two.query( + query_string + ).one() + + # Modify our asset version + new_asset_version['metadata']['test'] = str(uuid.uuid4()) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure we get the newly + # populated data. + session_two.query( + query_string + ).all() + + assert ( + new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] + ) + + local_data = str(uuid.uuid4()) + + new_asset_version_two['metadata']['test'] = local_data + + # Modify our asset version again + new_asset_version['metadata']['test'] = str(uuid.uuid4()) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure our local changes + # are not overwritten. 
+ session_two.query( + query_string + ).all() + + assert ( + new_asset_version_two['metadata']['test'] == local_data + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py new file mode 100644 index 00000000000..7a9b0fadaa4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py @@ -0,0 +1,251 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import uuid + +import pytest + +import ftrack_api + +@pytest.fixture( + params=[ + 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', + 'Asset' + ] +) +def new_entity_and_custom_attribute(request, session): + '''Return tuple with new entity, custom attribute name and value.''' + if request.param == 'AssetVersion': + entity = session.create( + request.param, { + 'asset': session.query('Asset').first() + } + ) + return (entity, 'versiontest', 123) + + elif request.param == 'Shot': + sequence = session.query('Sequence').first() + entity = session.create( + request.param, { + 'parent_id': sequence['id'], + 'project_id': sequence['project_id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'fstart', 1005) + + elif request.param == 'Asset': + shot = session.query('Shot').first() + entity = session.create( + request.param, { + 'context_id': shot['project_id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'htest', 1005) + + elif request.param in ('AssetVersionList', 'TypedContextList'): + entity = session.create( + request.param, { + 'project_id': session.query('Project').first()['id'], + 'category_id': session.query('ListCategory').first()['id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'listbool', True) + + elif request.param == 'User': + entity = session.create( + request.param, { + 'first_name': 'Custom attribute 
test', + 'last_name': 'Custom attribute test', + 'username': str(uuid.uuid1()) + } + ) + return (entity, 'teststring', 'foo') + + +@pytest.mark.parametrize( + 'entity_type, entity_model_name, custom_attribute_name', + [ + ('Task', 'task', 'customNumber'), + ('AssetVersion', 'assetversion', 'NumberField') + ], + ids=[ + 'task', + 'asset_version' + ] +) +def test_read_set_custom_attribute( + session, entity_type, entity_model_name, custom_attribute_name +): + '''Retrieve custom attribute value set on instance.''' + custom_attribute_value = session.query( + 'CustomAttributeValue where configuration.key is ' + '{custom_attribute_name}' + .format( + custom_attribute_name=custom_attribute_name + ) + ).first() + + entity = session.query( + 'select custom_attributes from {entity_type} where id is ' + '{entity_id}'.format( + entity_type=entity_type, + entity_id=custom_attribute_value['entity_id'], + ) + ).first() + + assert custom_attribute_value + + assert entity['id'] == entity['custom_attributes'].collection.entity['id'] + assert entity is entity['custom_attributes'].collection.entity + assert ( + entity['custom_attributes'][custom_attribute_name] == + custom_attribute_value['value'] + ) + + assert custom_attribute_name in entity['custom_attributes'].keys() + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'customNumber'), + ('Shot', 'fstart'), + ( + 'AssetVersion', 'NumberField' + ) + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_write_set_custom_attribute_value( + session, entity_type, custom_attribute_name +): + '''Overwrite existing instance level custom attribute value.''' + entity = session.query( + 'select custom_attributes from {entity_type} where ' + 'custom_attributes.configuration.key is {custom_attribute_name}'.format( + entity_type=entity_type, + custom_attribute_name=custom_attribute_name + ) + ).first() + + entity['custom_attributes'][custom_attribute_name] = 42 + + assert 
entity['custom_attributes'][custom_attribute_name] == 42 + + session.commit() + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'fstart'), + ('Shot', 'Not existing'), + ('AssetVersion', 'fstart') + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_read_custom_attribute_that_does_not_exist( + session, entity_type, custom_attribute_name +): + '''Fail to read value from a custom attribute that does not exist.''' + entity = session.query( + 'select custom_attributes from {entity_type}'.format( + entity_type=entity_type + ) + ).first() + + with pytest.raises(KeyError): + entity['custom_attributes'][custom_attribute_name] + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'fstart'), + ('Shot', 'Not existing'), + ('AssetVersion', 'fstart') + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_write_custom_attribute_that_does_not_exist( + session, entity_type, custom_attribute_name +): + '''Fail to write a value to a custom attribute that does not exist.''' + entity = session.query( + 'select custom_attributes from {entity_type}'.format( + entity_type=entity_type + ) + ).first() + + with pytest.raises(KeyError): + entity['custom_attributes'][custom_attribute_name] = 42 + + +def test_set_custom_attribute_on_new_but_persisted_version( + session, new_asset_version +): + '''Set custom attribute on new persisted version.''' + new_asset_version['custom_attributes']['versiontest'] = 5 + session.commit() + + +@pytest.mark.xfail( + raises=ftrack_api.exception.ServerError, + reason='Due to user permission errors.' 
+) +def test_batch_create_entity_and_custom_attributes( + new_entity_and_custom_attribute +): + '''Write custom attribute value and entity in the same batch.''' + entity, name, value = new_entity_and_custom_attribute + session = entity.session + entity['custom_attributes'][name] = value + + assert entity['custom_attributes'][name] == value + session.commit() + + assert entity['custom_attributes'][name] == value + + +def test_refresh_custom_attribute(new_asset_version): + '''Test custom attribute refresh.''' + session_two = ftrack_api.Session() + + query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + asset_version_two = session_two.query( + query_string + ).first() + + new_asset_version['custom_attributes']['versiontest'] = 42 + + new_asset_version.session.commit() + + asset_version_two = session_two.query( + query_string + ).first() + + assert ( + new_asset_version['custom_attributes']['versiontest'] == + asset_version_two['custom_attributes']['versiontest'] + ) + + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py new file mode 100644 index 00000000000..c53dda9630f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py @@ -0,0 +1,129 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import tempfile + +import pytest + +import ftrack_api.data + + +@pytest.fixture() +def content(): + '''Return initial content.''' + return 'test data' + + +@pytest.fixture(params=['file', 'file_wrapper', 'string']) +def data(request, content): + '''Return cache.''' + + if request.param == 'string': + data_object = ftrack_api.data.String(content) + + elif request.param == 'file': + file_handle, path = tempfile.mkstemp() + file_object = os.fdopen(file_handle, 'r+') + 
file_object.write(content) + file_object.flush() + file_object.close() + + data_object = ftrack_api.data.File(path, 'r+') + + def cleanup(): + '''Cleanup.''' + data_object.close() + os.remove(path) + + request.addfinalizer(cleanup) + + elif request.param == 'file_wrapper': + file_handle, path = tempfile.mkstemp() + file_object = os.fdopen(file_handle, 'r+') + file_object.write(content) + file_object.seek(0) + + data_object = ftrack_api.data.FileWrapper(file_object) + + def cleanup(): + '''Cleanup.''' + data_object.close() + os.remove(path) + + request.addfinalizer(cleanup) + + else: + raise ValueError('Unrecognised parameter: {0}'.format(request.param)) + + return data_object + + +def test_read(data, content): + '''Return content from current position up to *limit*.''' + assert data.read(5) == content[:5] + assert data.read() == content[5:] + + +def test_write(data, content): + '''Write content at current position.''' + assert data.read() == content + data.write('more test data') + data.seek(0) + assert data.read() == content + 'more test data' + + +def test_flush(data): + '''Flush buffers ensuring data written.''' + # TODO: Implement better test than just calling function. 
+ data.flush() + + +def test_seek(data, content): + '''Move internal pointer to *position*.''' + data.seek(5) + assert data.read() == content[5:] + + +def test_tell(data): + '''Return current position of internal pointer.''' + assert data.tell() == 0 + data.seek(5) + assert data.tell() == 5 + + +def test_close(data): + '''Flush buffers and prevent further access.''' + data.close() + with pytest.raises(ValueError) as error: + data.read() + + assert 'I/O operation on closed file' in str(error.value) + + +class Dummy(ftrack_api.data.Data): + '''Dummy string.''' + + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + + def write(self, content): + '''Write content at current position.''' + + +def test_unsupported_tell(): + '''Fail when tell unsupported.''' + data = Dummy() + with pytest.raises(NotImplementedError) as error: + data.tell() + + assert 'Tell not supported' in str(error.value) + + +def test_unsupported_seek(): + '''Fail when seek unsupported.''' + data = Dummy() + with pytest.raises(NotImplementedError) as error: + data.seek(5) + + assert 'Seek not supported' in str(error.value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py new file mode 100644 index 00000000000..ae565cb3f50 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py @@ -0,0 +1,70 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import termcolor + +import ftrack_api.formatter + + +def colored(text, *args, **kwargs): + '''Pass through so there are no escape sequences in output.''' + return text + + +def test_format(user, mocker): + '''Return formatted representation of entity.''' + mocker.patch.object(termcolor, 'colored', colored) + + result = ftrack_api.formatter.format(user) + + # Cannot test entire string as 
too variable so check for key text. + assert result.startswith('User\n') + assert ' username: jenkins' in result + assert ' email: ' in result + + +def test_format_using_custom_formatters(user): + '''Return formatted representation of entity using custom formatters.''' + result = ftrack_api.formatter.format( + user, formatters={ + 'header': lambda text: '*{0}*'.format(text), + 'label': lambda text: '-{0}'.format(text) + } + ) + + # Cannot test entire string as too variable so check for key text. + assert result.startswith('*User*\n') + assert ' -username: jenkins' in result + assert ' -email: ' in result + + +def test_format_filtering(new_user, mocker): + '''Return formatted representation using custom filter.''' + mocker.patch.object(termcolor, 'colored', colored) + + with new_user.session.auto_populating(False): + result = ftrack_api.formatter.format( + new_user, + attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] + ) + + # Cannot test entire string as too variable so check for key text. + assert result.startswith('User\n') + assert ' username: {0}'.format(new_user['username']) in result + assert ' email: ' not in result + + +def test_format_recursive(user, mocker): + '''Return formatted recursive representation.''' + mocker.patch.object(termcolor, 'colored', colored) + + user.session.populate(user, 'timelogs.user') + + with user.session.auto_populating(False): + result = ftrack_api.formatter.format(user, recursive=True) + + # Cannot test entire string as too variable so check for key text. 
+ assert result.startswith('User\n') + assert ' username: jenkins' + assert ' timelogs: Timelog' in result + assert ' user: User{...}' in result diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py new file mode 100644 index 00000000000..57b44613a84 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py @@ -0,0 +1,101 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.inspection +import ftrack_api.symbol + + +def test_identity(user): + '''Retrieve identity of *user*.''' + identity = ftrack_api.inspection.identity(user) + assert identity[0] == 'User' + assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] + + +def test_primary_key(user): + '''Retrieve primary key of *user*.''' + primary_key = ftrack_api.inspection.primary_key(user) + assert primary_key == { + 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' + } + + +def test_created_entity_state(session, unique_name): + '''Created entity has CREATED state.''' + new_user = session.create('User', {'username': unique_name}) + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + # Even after a modification the state should remain as CREATED. 
+ new_user['username'] = 'changed' + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + +def test_retrieved_entity_state(user): + '''Retrieved entity has NOT_SET state.''' + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET + + +def test_modified_entity_state(user): + '''Modified entity has MODIFIED state.''' + user['username'] = 'changed' + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED + + +def test_deleted_entity_state(session, user): + '''Deleted entity has DELETED state.''' + session.delete(user) + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED + + +def test_post_commit_entity_state(session, unique_name): + '''Entity has NOT_SET state post commit.''' + new_user = session.create('User', {'username': unique_name}) + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + session.commit() + + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.NOT_SET + + +def test_states(session, unique_name, user): + '''Determine correct states for multiple entities.''' + # NOT_SET + user_a = session.create('User', {'username': unique_name}) + session.commit() + + # CREATED + user_b = session.create('User', {'username': unique_name}) + user_b['username'] = 'changed' + + # MODIFIED + user_c = user + user_c['username'] = 'changed' + + # DELETED + user_d = session.create('User', {'username': unique_name}) + session.delete(user_d) + + # Assert states. 
+ states = ftrack_api.inspection.states([user_a, user_b, user_c, user_d]) + + assert states == [ + ftrack_api.symbol.NOT_SET, + ftrack_api.symbol.CREATED, + ftrack_api.symbol.MODIFIED, + ftrack_api.symbol.DELETED + ] + + +def test_states_for_no_entities(): + '''Return empty list of states when no entities passed.''' + states = ftrack_api.inspection.states([]) + assert states == [] + + +def test_skip_operations_for_non_inspected_entities(session, unique_name): + '''Skip operations for non inspected entities.''' + user_a = session.create('User', {'username': unique_name + '-1'}) + user_b = session.create('User', {'username': unique_name + '-2'}) + + states = ftrack_api.inspection.states([user_a]) + assert states == [ftrack_api.symbol.CREATED] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py new file mode 100644 index 00000000000..702bfae355d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py @@ -0,0 +1,79 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.operation + + +def test_operations_initialise(): + '''Initialise empty operations stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + +def test_operations_push(): + '''Push new operation onto stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operation = ftrack_api.operation.Operation() + operations.push(operation) + assert list(operations)[-1] is operation + + +def test_operations_pop(): + '''Pop and return operation from stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + operation = ftrack_api.operation.Operation() + 
operations.push(operation) + + assert len(operations) == 3 + popped = operations.pop() + assert popped is operation + assert len(operations) == 2 + + +def test_operations_count(): + '''Count operations in stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operations.push(ftrack_api.operation.Operation()) + assert len(operations) == 1 + + operations.pop() + assert len(operations) == 0 + + +def test_operations_clear(): + '''Clear operations stack.''' + operations = ftrack_api.operation.Operations() + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + assert len(operations) == 3 + + operations.clear() + assert len(operations) == 0 + + +def test_operations_iter(): + '''Iterate over operations stack.''' + operations = ftrack_api.operation.Operations() + operation_a = ftrack_api.operation.Operation() + operation_b = ftrack_api.operation.Operation() + operation_c = ftrack_api.operation.Operation() + + operations.push(operation_a) + operations.push(operation_b) + operations.push(operation_c) + + assert len(operations) == 3 + for operation, expected in zip( + operations, [operation_a, operation_b, operation_c] + ): + assert operation is expected + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py new file mode 100644 index 00000000000..247b496d963 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py @@ -0,0 +1,48 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api + + +class Class(object): + '''Class.''' + + +class Mixin(object): + '''Mixin.''' + + def method(self): + '''Method.''' + return True + + +def test_mixin(): + '''Mixin class to instance.''' + instance_a = Class() + 
instance_b = Class() + + assert not hasattr(instance_a, 'method') + assert not hasattr(instance_b, 'method') + + ftrack_api.mixin(instance_a, Mixin) + + assert hasattr(instance_a, 'method') + assert instance_a.method() is True + assert not hasattr(instance_b, 'method') + + +def test_mixin_same_class_multiple_times(): + '''Mixin class to instance multiple times.''' + instance = Class() + assert not hasattr(instance, 'method') + assert len(instance.__class__.mro()) == 2 + + ftrack_api.mixin(instance, Mixin) + assert hasattr(instance, 'method') + assert instance.method() is True + assert len(instance.__class__.mro()) == 4 + + ftrack_api.mixin(instance, Mixin) + assert hasattr(instance, 'method') + assert instance.method() is True + assert len(instance.__class__.mro()) == 4 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py new file mode 100644 index 00000000000..252c813a9b5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py @@ -0,0 +1,192 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import textwrap +import logging +import re + +import pytest + +import ftrack_api.plugin + + +@pytest.fixture() +def valid_plugin(temporary_path): + '''Return path to directory containing a valid plugin.''' + with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(*args, **kw): + print "Registered", args, kw + ''')) + + return temporary_path + + +@pytest.fixture() +def python_non_plugin(temporary_path): + '''Return path to directory containing Python file that is non plugin.''' + with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + print "Not a plugin" + + def not_called(): + print "Not called" + ''')) + + 
return temporary_path + + +@pytest.fixture() +def non_plugin(temporary_path): + '''Return path to directory containing file that is non plugin.''' + with open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: + file_object.write('Never seen') + + return temporary_path + + +@pytest.fixture() +def broken_plugin(temporary_path): + '''Return path to directory containing broken plugin.''' + with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: + file_object.write('syntax error') + + return temporary_path + + +@pytest.fixture() +def plugin(request, temporary_path): + '''Return path containing a plugin with requested specification.''' + specification = request.param + output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) + output = re.sub('\*args', 'args', output) + output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) + + with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: + content = textwrap.dedent(''' + def register({}): + print {} + '''.format(specification, output)) + file_object.write(content) + + return temporary_path + + +def test_discover_empty_paths(capsys): + '''Discover no plugins when paths are empty.''' + ftrack_api.plugin.discover([' ']) + output, error = capsys.readouterr() + assert not output + assert not error + + +def test_discover_valid_plugin(valid_plugin, capsys): + '''Discover valid plugin.''' + ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) + output, error = capsys.readouterr() + assert 'Registered (1, 2) {\'3\': 4}' in output + + +def test_discover_python_non_plugin(python_non_plugin, capsys): + '''Discover Python non plugin.''' + ftrack_api.plugin.discover([python_non_plugin]) + output, error = capsys.readouterr() + assert 'Not a plugin' in output + assert 'Not called' not in output + + +def test_discover_non_plugin(non_plugin, capsys): + '''Discover non plugin.''' + ftrack_api.plugin.discover([non_plugin]) + output, error = capsys.readouterr() + 
assert not output + assert not error + + +def test_discover_broken_plugin(broken_plugin, caplog): + '''Discover broken plugin.''' + ftrack_api.plugin.discover([broken_plugin]) + + records = caplog.records() + assert len(records) == 1 + assert records[0].levelno is logging.WARNING + assert 'Failed to load plugin' in records[0].message + + +@pytest.mark.parametrize( + 'plugin, positional, keyword, expected', + [ + ( + 'a, b=False, c=False, d=False', + (1, 2), {'c': True, 'd': True, 'e': True}, + '1 b=2 c=True d=True' + ), + ( + '*args', + (1, 2), {'b': True, 'c': False}, + '(1, 2)' + ), + ( + '**kwargs', + tuple(), {'b': True, 'c': False}, + '[(\'b\', True), (\'c\', False)]' + ), + ( + 'a=False, b=False', + (True,), {'b': True}, + 'a=True b=True' + ), + ( + 'a, c=False, *args', + (1, 2, 3, 4), {}, + '1 c=2 (3, 4)' + ), + ( + 'a, c=False, **kwargs', + tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, + '1 c=3 [(\'b\', 2), (\'d\', 4)]' + ), + ], + indirect=['plugin'], + ids=[ + 'mixed-explicit', + 'variable-args-only', + 'variable-kwargs-only', + 'keyword-from-positional', + 'trailing-variable-args', + 'trailing-keyword-args' + ] +) +def test_discover_plugin_with_specific_signature( + plugin, positional, keyword, expected, capsys +): + '''Discover plugin passing only supported arguments.''' + ftrack_api.plugin.discover( + [plugin], positional, keyword + ) + output, error = capsys.readouterr() + assert expected in output + + +def test_discover_plugin_varying_signatures(temporary_path, capsys): + '''Discover multiple plugins with varying signatures.''' + with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(a): + print (a,) + ''')) + + with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(a, b=False): + print (a,), {'b': b} + ''')) + + ftrack_api.plugin.discover( + [temporary_path], (True,), {'b': True} + ) + + output, error = 
capsys.readouterr() + assert '(True,)'in output + assert '(True,) {\'b\': True}' in output diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py new file mode 100644 index 00000000000..f8e3f9dec33 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py @@ -0,0 +1,164 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import math + +import pytest + +import ftrack_api +import ftrack_api.query +import ftrack_api.exception + + +def test_index(session): + '''Index into query result.''' + results = session.query('User') + assert isinstance(results[2], session.types['User']) + + +def test_len(session): + '''Return count of results using len.''' + results = session.query('User where username is jenkins') + assert len(results) == 1 + + +def test_all(session): + '''Return all results using convenience method.''' + results = session.query('User').all() + assert isinstance(results, list) + assert len(results) + + +def test_implicit_iteration(session): + '''Implicitly iterate through query result.''' + results = session.query('User') + assert isinstance(results, ftrack_api.query.QueryResult) + + records = [] + for record in results: + records.append(record) + + assert len(records) == len(results) + + +def test_one(session): + '''Return single result using convenience method.''' + user = session.query('User where username is jenkins').one() + assert user['username'] == 'jenkins' + + +def test_one_fails_for_no_results(session): + '''Fail to fetch single result when no results available.''' + with pytest.raises(ftrack_api.exception.NoResultFoundError): + session.query('User where username is does_not_exist').one() + + +def test_one_fails_for_multiple_results(session): + '''Fail to fetch single result when multiple results available.''' + with 
pytest.raises(ftrack_api.exception.MultipleResultsFoundError): + session.query('User').one() + + +def test_one_with_existing_limit(session): + '''Fail to return single result when existing limit in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins limit 0').one() + + +def test_one_with_existing_offset(session): + '''Fail to return single result when existing offset in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins offset 2').one() + + +def test_one_with_prefetched_data(session): + '''Return single result ignoring prefetched data.''' + query = session.query('User where username is jenkins') + query.all() + + user = query.one() + assert user['username'] == 'jenkins' + + +def test_first(session): + '''Return first result using convenience method.''' + users = session.query('User').all() + + user = session.query('User').first() + assert user == users[0] + + +def test_first_returns_none_when_no_results(session): + '''Return None when no results available.''' + user = session.query('User where username is does_not_exist').first() + assert user is None + + +def test_first_with_existing_limit(session): + '''Fail to return first result when existing limit in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins limit 0').first() + + +def test_first_with_existing_offset(session): + '''Return first result whilst respecting custom offset.''' + users = session.query('User').all() + + user = session.query('User offset 2').first() + assert user == users[2] + + +def test_first_with_prefetched_data(session): + '''Return first result ignoring prefetched data.''' + query = session.query('User where username is jenkins') + query.all() + + user = query.first() + assert user['username'] == 'jenkins' + + +def test_paging(session, mocker): + '''Page through results.''' + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 5 + 
query = session.query('User limit 50', page_size=page_size) + records = query.all() + + assert session.call.call_count == ( + math.ceil(len(records) / float(page_size)) + ) + + +def test_paging_respects_offset_and_limit(session, mocker): + '''Page through results respecting offset and limit.''' + users = session.query('User').all() + + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 6 + query = session.query('User offset 2 limit 8', page_size=page_size) + records = query.all() + + assert session.call.call_count == 2 + assert len(records) == 8 + assert records == users[2:10] + + +def test_paging_respects_limit_smaller_than_page_size(session, mocker): + '''Use initial limit when less than page size.''' + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 100 + query = session.query('User limit 10', page_size=page_size) + records = query.all() + + assert session.call.call_count == 1 + session.call.assert_called_once_with( + [{ + 'action': 'query', + 'expression': 'select id from User offset 0 limit 10' + }] + ) + + assert len(records) == 10 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py new file mode 100644 index 00000000000..5087efcc088 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py @@ -0,0 +1,1519 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import tempfile +import functools +import uuid +import textwrap +import datetime +import json +import random + +import pytest +import mock +import arrow +import requests + +import ftrack_api +import ftrack_api.cache +import ftrack_api.inspection +import ftrack_api.symbol +import ftrack_api.exception +import ftrack_api.session +import ftrack_api.collection + + 
+@pytest.fixture(params=['memory', 'persisted']) +def cache(request): + '''Return cache.''' + if request.param == 'memory': + cache = None # There is already a default Memory cache present. + elif request.param == 'persisted': + cache_path = os.path.join( + tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + ) + + cache = lambda session: ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=functools.partial( + session.encode, entity_attribute_strategy='persisted_only' + ), + decode=session.decode + ) + + def cleanup(): + '''Cleanup.''' + try: + os.remove(cache_path) + except OSError: + # BSD DB (Mac OSX) implementation of the interface will append + # a .db extension. + os.remove(cache_path + '.db') + + request.addfinalizer(cleanup) + + return cache + + +@pytest.fixture() +def temporary_invalid_schema_cache(request): + '''Return schema cache path to invalid schema cache file.''' + schema_cache_path = os.path.join( + tempfile.gettempdir(), + 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + ) + + with open(schema_cache_path, 'w') as file_: + file_.write('${invalid json}') + + def cleanup(): + '''Cleanup.''' + os.remove(schema_cache_path) + + request.addfinalizer(cleanup) + + return schema_cache_path + + +@pytest.fixture() +def temporary_valid_schema_cache(request, mocked_schemas): + '''Return schema cache path to valid schema cache file.''' + schema_cache_path = os.path.join( + tempfile.gettempdir(), + 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + ) + + with open(schema_cache_path, 'w') as file_: + json.dump(mocked_schemas, file_, indent=4) + + def cleanup(): + '''Cleanup.''' + os.remove(schema_cache_path) + + request.addfinalizer(cleanup) + + return schema_cache_path + + +class SelectiveCache(ftrack_api.cache.ProxyCache): + '''Proxy cache that should not cache newly created entities.''' + + def set(self, key, value): + '''Set *value* for *key*.''' + if isinstance(value, 
ftrack_api.entity.base.Entity): + if ( + ftrack_api.inspection.state(value) + is ftrack_api.symbol.CREATED + ): + return + + super(SelectiveCache, self).set(key, value) + + +def test_get_entity(session, user): + '''Retrieve an entity by type and id.''' + matching = session.get(*ftrack_api.inspection.identity(user)) + assert matching == user + + +def test_get_non_existant_entity(session): + '''Retrieve a non-existant entity by type and id.''' + matching = session.get('User', 'non-existant-id') + assert matching is None + + +def test_get_entity_of_invalid_type(session): + '''Fail to retrieve an entity using an invalid type.''' + with pytest.raises(KeyError): + session.get('InvalidType', 'id') + + +def test_create(session): + '''Create entity.''' + user = session.create('User', {'username': 'martin'}) + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] == 'martin' + assert user['email'] is ftrack_api.symbol.NOT_SET + + +def test_create_using_only_defaults(session): + '''Create entity using defaults only.''' + user = session.create('User') + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] is ftrack_api.symbol.NOT_SET + + +def test_create_using_server_side_defaults(session): + '''Create entity using server side defaults.''' + user = session.create('User') + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] is ftrack_api.symbol.NOT_SET + + session.commit() + assert user['username'] is not ftrack_api.symbol.NOT_SET + + +def test_create_overriding_defaults(session): + '''Create entity overriding defaults.''' + uid = str(uuid.uuid4()) + user = session.create('User', {'id': uid}) + with session.auto_populating(False): + assert user['id'] == uid + + +def test_create_with_reference(session): + '''Create entity with a reference to another.''' + status = session.query('Status')[0] + 
task = session.create('Task', {'status': status}) + assert task['status'] is status + + +def test_ensure_new_entity(session, unique_name): + '''Ensure entity, creating first.''' + entity = session.ensure('User', {'username': unique_name}) + assert entity['username'] == unique_name + + +def test_ensure_entity_with_non_string_data_types(session): + '''Ensure entity against non-string data types, creating first.''' + datetime = arrow.get() + + task = session.query('Task').first() + user = session.query( + 'User where username is {}'.format(session.api_user) + ).first() + + first = session.ensure( + 'Timelog', + { + 'start': datetime, + 'duration': 10, + 'user_id': user['id'], + 'context_id': task['id'] + } + ) + + with mock.patch.object(session, 'create') as mocked: + session.ensure( + 'Timelog', + { + 'start': datetime, + 'duration': 10, + 'user_id': user['id'], + 'context_id': task['id'] + } + ) + assert not mocked.called + + assert first['start'] == datetime + assert first['duration'] == 10 + + +def test_ensure_entity_with_identifying_keys(session, unique_name): + '''Ensure entity, checking using keys subset and then creating.''' + entity = session.ensure( + 'User', {'username': unique_name, 'email': 'test@example.com'}, + identifying_keys=['username'] + ) + assert entity['username'] == unique_name + + +def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): + '''Fail to ensure entity when identifying key missing from data.''' + with pytest.raises(KeyError): + session.ensure( + 'User', {'username': unique_name, 'email': 'test@example.com'}, + identifying_keys=['invalid'] + ) + + +def test_ensure_entity_with_missing_identifying_keys(session): + '''Fail to ensure entity when no identifying keys determined.''' + with pytest.raises(ValueError): + session.ensure('User', {}) + + +def test_ensure_existing_entity(session, unique_name): + '''Ensure existing entity.''' + entity = session.ensure('User', {'first_name': unique_name}) + + # Second call should 
not commit any new entity, just retrieve the existing. + with mock.patch.object(session, 'create') as mocked: + retrieved = session.ensure('User', {'first_name': unique_name}) + assert not mocked.called + assert retrieved == entity + + +def test_ensure_update_existing_entity(session, unique_name): + '''Ensure and update existing entity.''' + entity = session.ensure( + 'User', {'first_name': unique_name, 'email': 'anon@example.com'} + ) + assert entity['email'] == 'anon@example.com' + + # Second call should commit updates. + retrieved = session.ensure( + 'User', {'first_name': unique_name, 'email': 'test@example.com'}, + identifying_keys=['first_name'] + ) + assert retrieved == entity + assert retrieved['email'] == 'test@example.com' + + +def test_reconstruct_entity(session): + '''Reconstruct entity.''' + uid = str(uuid.uuid4()) + data = { + 'id': uid, + 'username': 'martin', + 'email': 'martin@example.com' + } + user = session.create('User', data, reconstructing=True) + + for attribute in user.attributes: + # No local attributes should be set. + assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET + + # Only remote attributes that had explicit values should be set. + value = attribute.get_remote_value(user) + if attribute.name in data: + assert value == data[attribute.name] + else: + assert value is ftrack_api.symbol.NOT_SET + + +def test_reconstruct_entity_does_not_apply_defaults(session): + '''Reconstruct entity does not apply defaults.''' + # Note: Use private method to avoid merge which requires id be set. + user = session._create('User', {}, reconstructing=True) + with session.auto_populating(False): + assert user['id'] is ftrack_api.symbol.NOT_SET + + +def test_reconstruct_empty_entity(session): + '''Reconstruct empty entity.''' + # Note: Use private method to avoid merge which requires id be set. + user = session._create('User', {}, reconstructing=True) + + for attribute in user.attributes: + # No local attributes should be set. 
+ assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET + + # No remote attributes should be set. + assert attribute.get_remote_value(user) is ftrack_api.symbol.NOT_SET + + +def test_delete_operation_ordering(session, unique_name): + '''Delete entities in valid order.''' + # Construct entities. + project_schema = session.query('ProjectSchema').first() + project = session.create('Project', { + 'name': unique_name, + 'full_name': unique_name, + 'project_schema': project_schema + }) + + sequence = session.create('Sequence', { + 'name': unique_name, + 'parent': project + }) + + session.commit() + + # Delete in order that should succeed. + session.delete(sequence) + session.delete(project) + + session.commit() + + +def test_create_then_delete_operation_ordering(session, unique_name): + '''Create and delete entity in one transaction.''' + entity = session.create('User', {'username': unique_name}) + session.delete(entity) + session.commit() + + +def test_create_and_modify_to_have_required_attribute(session, unique_name): + '''Create and modify entity to have required attribute in transaction.''' + entity = session.create('Scope', {}) + other = session.create('Scope', {'name': unique_name}) + entity['name'] = '{0}2'.format(unique_name) + session.commit() + + +def test_ignore_in_create_entity_payload_values_set_to_not_set( + mocker, unique_name, session +): + '''Ignore in commit, created entity data set to NOT_SET''' + mocked = mocker.patch.object(session, 'call') + + # Should ignore 'email' attribute in payload. 
+ new_user = session.create( + 'User', {'username': unique_name, 'email': 'test'} + ) + new_user['email'] = ftrack_api.symbol.NOT_SET + session.commit() + payloads = mocked.call_args[0][0] + assert len(payloads) == 1 + + +def test_ignore_operation_that_modifies_attribute_to_not_set( + mocker, session, user +): + '''Ignore in commit, operation that sets attribute value to NOT_SET''' + mocked = mocker.patch.object(session, 'call') + + # Should result in no call to server. + user['email'] = ftrack_api.symbol.NOT_SET + session.commit() + + assert not mocked.called + + +def test_operation_optimisation_on_commit(session, mocker): + '''Optimise operations on commit.''' + mocked = mocker.patch.object(session, 'call') + + user_a = session.create('User', {'username': 'bob'}) + user_a['username'] = 'foo' + user_a['email'] = 'bob@example.com' + + user_b = session.create('User', {'username': 'martin'}) + user_b['email'] = 'martin@ftrack.com' + + user_a['email'] = 'bob@example.com' + user_a['first_name'] = 'Bob' + + user_c = session.create('User', {'username': 'neverexist'}) + user_c['email'] = 'ignore@example.com' + session.delete(user_c) + + user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() + user_b_entity_key = ftrack_api.inspection.primary_key(user_b).values() + + session.commit() + + # The above operations should have translated into three payloads to call + # (two creates and one update). 
+ payloads = mocked.call_args[0][0] + assert len(payloads) == 3 + + assert payloads[0]['action'] == 'create' + assert payloads[0]['entity_key'] == user_a_entity_key + assert set(payloads[0]['entity_data'].keys()) == set([ + '__entity_type__', 'id', 'resource_type', 'username' + ]) + + assert payloads[1]['action'] == 'create' + assert payloads[1]['entity_key'] == user_b_entity_key + assert set(payloads[1]['entity_data'].keys()) == set([ + '__entity_type__', 'id', 'resource_type', 'username', 'email' + ]) + + assert payloads[2]['action'] == 'update' + assert payloads[2]['entity_key'] == user_a_entity_key + assert set(payloads[2]['entity_data'].keys()) == set([ + '__entity_type__', 'email', 'first_name' + ]) + + +def test_state_collection(session, unique_name, user): + '''Session state collection holds correct entities.''' + # NOT_SET + user_a = session.create('User', {'username': unique_name}) + session.commit() + + # CREATED + user_b = session.create('User', {'username': unique_name}) + user_b['username'] = 'changed' + + # MODIFIED + user_c = user + user_c['username'] = 'changed' + + # DELETED + user_d = session.create('User', {'username': unique_name}) + session.delete(user_d) + + assert session.created == [user_b] + assert session.modified == [user_c] + assert session.deleted == [user_d] + + +def test_get_entity_with_composite_primary_key(session, new_project): + '''Retrieve entity that uses a composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. 
+ new_session = ftrack_api.Session() + retrieved_entity = new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values() + ) + + assert retrieved_entity == entity + + +def test_get_entity_with_incomplete_composite_primary_key(session, new_project): + '''Fail to retrieve entity using incomplete composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. + new_session = ftrack_api.Session() + with pytest.raises(ValueError): + new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values()[0] + ) + + +def test_populate_entity(session, new_user): + '''Populate entity that uses single primary key.''' + with session.auto_populating(False): + assert new_user['email'] is ftrack_api.symbol.NOT_SET + + session.populate(new_user, 'email') + assert new_user['email'] is not ftrack_api.symbol.NOT_SET + + +def test_populate_entities(session, unique_name): + '''Populate multiple entities that use single primary key.''' + users = [] + for index in range(3): + users.append( + session.create( + 'User', {'username': '{0}-{1}'.format(unique_name, index)} + ) + ) + + session.commit() + + with session.auto_populating(False): + for user in users: + assert user['email'] is ftrack_api.symbol.NOT_SET + + session.populate(users, 'email') + + for user in users: + assert user['email'] is not ftrack_api.symbol.NOT_SET + + +def test_populate_entity_with_composite_primary_key(session, new_project): + '''Populate entity that uses a composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. 
+ new_session = ftrack_api.Session() + retrieved_entity = new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values() + ) + + # Manually change already populated remote value so can test it gets reset + # on populate call. + retrieved_entity.attributes.get('value').set_remote_value( + retrieved_entity, 'changed' + ) + + new_session.populate(retrieved_entity, 'value') + assert retrieved_entity['value'] == 'value' + + +@pytest.mark.parametrize('server_information, compatible', [ + ({}, False), + ({'version': '3.3.11'}, True), + ({'version': '3.3.12'}, True), + ({'version': '3.4'}, True), + ({'version': '3.4.1'}, True), + ({'version': '3.5.16'}, True), + ({'version': '3.3.10'}, False) +], ids=[ + 'No information', + 'Valid current version', + 'Valid higher version', + 'Valid higher version', + 'Valid higher version', + 'Valid higher version', + 'Invalid lower version' +]) +def test_check_server_compatibility( + server_information, compatible, session +): + '''Check server compatibility.''' + with mock.patch.dict( + session._server_information, server_information, clear=True + ): + if compatible: + session.check_server_compatibility() + else: + with pytest.raises(ftrack_api.exception.ServerCompatibilityError): + session.check_server_compatibility() + + +def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): + '''Encode entity using "all" entity_attribute_strategy.''' + new_bar = mocked_schema_session.create( + 'Bar', + { + 'name': 'myBar', + 'id': 'bar_unique_id' + } + ) + + new_foo = mocked_schema_session.create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42, + 'number': 12345678.9, + 'boolean': False, + 'date': arrow.get('2015-11-18 15:24:09'), + 'bars': [new_bar] + } + ) + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='all' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], + 
"boolean": false, + "date": {"__type__": "datetime", "value": "2015-11-18T15:24:09+00:00"}, + "id": "a_unique_id", + "integer": 42, + "number": 12345678.9, + "string": "abc"} + ''').replace('\n', '') + + +def test_encode_entity_using_only_set_attributes_strategy( + mocked_schema_session +): + '''Encode entity using "set_only" entity_attribute_strategy.''' + new_foo = mocked_schema_session.create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42 + } + ) + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='set_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "id": "a_unique_id", + "integer": 42, + "string": "abc"} + ''').replace('\n', '') + + +def test_encode_computed_attribute_using_persisted_only_attributes_strategy( + mocked_schema_session +): + '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' + new_bar = mocked_schema_session._create( + 'Bar', + { + 'name': 'myBar', + 'id': 'bar_unique_id', + 'computed_value': 'FOO' + }, + reconstructing=True + ) + + encoded = mocked_schema_session.encode( + new_bar, entity_attribute_strategy='persisted_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Bar", + "id": "bar_unique_id", + "name": "myBar"} + ''').replace('\n', '') + + +def test_encode_entity_using_only_modified_attributes_strategy( + mocked_schema_session +): + '''Encode entity using "modified_only" entity_attribute_strategy.''' + new_foo = mocked_schema_session._create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42 + }, + reconstructing=True + ) + + new_foo['string'] = 'Modified' + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='modified_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "id": "a_unique_id", + "string": "Modified"} + ''').replace('\n', '') + + +def test_encode_entity_using_invalid_strategy(session, new_task): + '''Fail to 
encode entity using invalid strategy.''' + with pytest.raises(ValueError): + session.encode(new_task, entity_attribute_strategy='invalid') + + +def test_encode_operation_payload(session): + '''Encode operation payload.''' + sequence_component = session.create_component( + "/path/to/sequence.%d.jpg [1]", location=None + ) + file_component = sequence_component["members"][0] + + encoded = session.encode([ + ftrack_api.session.OperationPayload({ + 'action': 'create', + 'entity_data': { + '__entity_type__': u'FileComponent', + u'container': sequence_component, + 'id': file_component['id'] + }, + 'entity_key': [file_component['id']], + 'entity_type': u'FileComponent' + }), + ftrack_api.session.OperationPayload({ + 'action': 'update', + 'entity_data': { + '__entity_type__': u'SequenceComponent', + u'members': ftrack_api.collection.Collection( + sequence_component, + sequence_component.attributes.get('members'), + data=[file_component] + ) + }, + 'entity_key': [sequence_component['id']], + 'entity_type': u'SequenceComponent' + }) + ]) + + expected = textwrap.dedent(''' + [{{"action": "create", + "entity_data": {{"__entity_type__": "FileComponent", + "container": {{"__entity_type__": "SequenceComponent", + "id": "{0[id]}"}}, + "id": "{1[id]}"}}, + "entity_key": ["{1[id]}"], + "entity_type": "FileComponent"}}, + {{"action": "update", + "entity_data": {{"__entity_type__": "SequenceComponent", + "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, + "entity_key": ["{0[id]}"], + "entity_type": "SequenceComponent"}}] + '''.format(sequence_component, file_component)).replace('\n', '') + + assert encoded == expected + + +def test_decode_partial_entity( + session, new_task +): + '''Decode partially encoded entity.''' + encoded = session.encode( + new_task, entity_attribute_strategy='set_only' + ) + + entity = session.decode(encoded) + + assert entity == new_task + assert entity is not new_task + + +def test_reset(mocker): + '''Reset session.''' + plugin_path = 
os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') + ) + session = ftrack_api.Session(plugin_paths=[plugin_path]) + + assert hasattr(session.types.get('User'), 'stub') + location = session.query('Location where name is "test.location"').one() + assert location.accessor is not ftrack_api.symbol.NOT_SET + + mocked_close = mocker.patch.object(session._request, 'close') + mocked_fetch = mocker.patch.object(session, '_load_schemas') + + session.reset() + + # Assert custom entity type maintained. + assert hasattr(session.types.get('User'), 'stub') + + # Assert location plugin re-configured. + location = session.query('Location where name is "test.location"').one() + assert location.accessor is not ftrack_api.symbol.NOT_SET + + # Assert connection not closed and no schema fetch issued. + assert not mocked_close.called + assert not mocked_fetch.called + + +def test_rollback_scalar_attribute_change(session, new_user): + '''Rollback scalar attribute change via session.''' + assert not session.recorded_operations + current_first_name = new_user['first_name'] + + new_user['first_name'] = 'NewName' + assert new_user['first_name'] == 'NewName' + assert session.recorded_operations + + session.rollback() + + assert not session.recorded_operations + assert new_user['first_name'] == current_first_name + + +def test_rollback_collection_attribute_change(session, new_user): + '''Rollback collection attribute change via session.''' + assert not session.recorded_operations + current_timelogs = new_user['timelogs'] + assert list(current_timelogs) == [] + + timelog = session.create('Timelog', {}) + new_user['timelogs'].append(timelog) + assert list(new_user['timelogs']) == [timelog] + assert session.recorded_operations + + session.rollback() + + assert not session.recorded_operations + assert list(new_user['timelogs']) == [] + + +def test_rollback_entity_creation(session): + '''Rollback entity creation via session.''' + assert not 
session.recorded_operations + + new_user = session.create('User') + assert session.recorded_operations + assert new_user in session.created + + session.rollback() + + assert not session.recorded_operations + assert new_user not in session.created + assert new_user not in session._local_cache.values() + + +def test_rollback_entity_deletion(session, new_user): + '''Rollback entity deletion via session.''' + assert not session.recorded_operations + + session.delete(new_user) + assert session.recorded_operations + assert new_user in session.deleted + + session.rollback() + assert not session.recorded_operations + assert new_user not in session.deleted + assert new_user in session._local_cache.values() + + +# Caching +# ------------------------------------------------------------------------------ + + +def test_get_entity_bypassing_cache(session, user, mocker): + '''Retrieve an entity by type and id bypassing cache.''' + mocker.patch.object(session, 'call', wraps=session.call) + + session.cache.remove( + session.cache_key_maker.key(ftrack_api.inspection.identity(user)) + ) + + matching = session.get(*ftrack_api.inspection.identity(user)) + + # Check a different instance returned. + assert matching is not user + + # Check instances have the same identity. + assert matching == user + + # Check cache was bypassed and server was called. + assert session.call.called + + +def test_get_entity_from_cache(cache, task, mocker): + '''Retrieve an entity by type and id from cache.''' + session = ftrack_api.Session(cache=cache) + + # Prepare cache. + session.merge(task) + + # Disable server calls. + mocker.patch.object(session, 'call') + + # Retrieve entity from cache. + entity = session.get(*ftrack_api.inspection.identity(task)) + + assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity == task + assert entity is not task + + # Check that no call was made to server. 
+ assert not session.call.called + + +def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): + '''Retrieve an entity tree from cache.''' + session = ftrack_api.Session(cache=cache) + + # Prepare cache. + # TODO: Maybe cache should be prepopulated for a better check here. + session.query( + 'select children, children.children, children.children.children, ' + 'children.children.children.assignments, ' + 'children.children.children.assignments.resource ' + 'from Project where id is "{0}"' + .format(new_project_tree['id']) + ).one() + + # Disable server calls. + mocker.patch.object(session, 'call') + + # Retrieve entity from cache. + entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) + + assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity == new_project_tree + assert entity is not new_project_tree + + # Check tree. + with session.auto_populating(False): + for sequence in entity['children']: + for shot in sequence['children']: + for task in shot['children']: + assignments = task['assignments'] + for assignment in assignments: + resource = assignment['resource'] + + assert resource is not ftrack_api.symbol.NOT_SET + + # Check that no call was made to server. + assert not session.call.called + + +def test_get_metadata_from_cache(session, mocker, cache, new_task): + '''Retrieve an entity along with its metadata from cache.''' + new_task['metadata']['key'] = 'value' + session.commit() + + fresh_session = ftrack_api.Session(cache=cache) + + # Prepare cache. + fresh_session.query( + 'select metadata.key, metadata.value from ' + 'Task where id is "{0}"' + .format(new_task['id']) + ).all() + + # Disable server calls. + mocker.patch.object(fresh_session, 'call') + + # Retrieve entity from cache. + entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) + + assert entity is not None, 'Failed to retrieve entity from cache.' 
+ assert entity == new_task + assert entity is not new_task + + # Check metadata cached correctly. + with fresh_session.auto_populating(False): + metadata = entity['metadata'] + assert metadata['key'] == 'value' + + assert not fresh_session.call.called + + +def test_merge_circular_reference(cache, temporary_file): + '''Merge circular reference into cache.''' + session = ftrack_api.Session(cache=cache) + # The following will test the condition as a FileComponent will be created + # with corresponding ComponentLocation. The server will return the file + # component data with the component location embedded. The component + # location will in turn have an embedded reference to the file component. + # If the merge does not prioritise the primary keys of the instance then + # any cache that relies on using the identity of the file component will + # fail. + component = session.create_component(path=temporary_file) + assert component + + +def test_create_with_selective_cache(session): + '''Create entity does not store entity in selective cache.''' + cache = ftrack_api.cache.MemoryCache() + session.cache.caches.append(SelectiveCache(cache)) + try: + user = session.create('User', {'username': 'martin'}) + cache_key = session.cache_key_maker.key( + ftrack_api.inspection.identity(user) + ) + + with pytest.raises(KeyError): + cache.get(cache_key) + + finally: + session.cache.caches.pop() + + +def test_correct_file_type_on_sequence_component(session): + '''Create sequence component with correct file type.''' + path = '/path/to/image/sequence.%04d.dpx [1-10]' + sequence_component = session.create_component(path) + + assert sequence_component['file_type'] == '.dpx' + + +def test_read_schemas_from_cache( + session, temporary_valid_schema_cache +): + '''Read valid content from schema cache.''' + expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' + + schemas, hash_ = session._read_schemas_from_cache( + temporary_valid_schema_cache + ) + + assert expected_hash == hash_ + + +def 
test_fail_to_read_schemas_from_invalid_cache( + session, temporary_invalid_schema_cache +): + '''Fail to read invalid content from schema cache.''' + with pytest.raises(ValueError): + session._read_schemas_from_cache( + temporary_invalid_schema_cache + ) + + +def test_write_schemas_to_cache( + session, temporary_valid_schema_cache +): + '''Write valid content to schema cache.''' + expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' + schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) + + session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) + + schemas, hash_ = session._read_schemas_from_cache( + temporary_valid_schema_cache + ) + + assert expected_hash == hash_ + + +def test_fail_to_write_invalid_schemas_to_cache( + session, temporary_valid_schema_cache +): + '''Fail to write invalid content to schema cache.''' + # Datetime not serialisable by default. + invalid_content = datetime.datetime.now() + + with pytest.raises(TypeError): + session._write_schemas_to_cache( + invalid_content, temporary_valid_schema_cache + ) + + +def test_load_schemas_from_valid_cache( + mocker, session, temporary_valid_schema_cache, mocked_schemas +): + '''Load schemas from cache.''' + expected_schemas = session._load_schemas(temporary_valid_schema_cache) + + mocked = mocker.patch.object(session, 'call') + schemas = session._load_schemas(temporary_valid_schema_cache) + + assert schemas == expected_schemas + assert not mocked.called + + +def test_load_schemas_from_server_when_cache_invalid( + mocker, session, temporary_invalid_schema_cache +): + '''Load schemas from server when cache invalid.''' + mocked = mocker.patch.object(session, 'call', wraps=session.call) + + session._load_schemas(temporary_invalid_schema_cache) + assert mocked.called + + +def test_load_schemas_from_server_when_cache_outdated( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas from server when cache outdated.''' + schemas, _ = 
session._read_schemas_from_cache(temporary_valid_schema_cache) + schemas.append({ + 'id': 'NewTest' + }) + session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) + + mocked = mocker.patch.object(session, 'call', wraps=session.call) + session._load_schemas(temporary_valid_schema_cache) + + assert mocked.called + + +def test_load_schemas_from_server_not_reporting_schema_hash( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas from server when server does not report schema hash.''' + mocked_write = mocker.patch.object( + session, '_write_schemas_to_cache', + wraps=session._write_schemas_to_cache + ) + + server_information = session._server_information.copy() + server_information.pop('schema_hash') + mocker.patch.object( + session, '_server_information', new=server_information + ) + + session._load_schemas(temporary_valid_schema_cache) + + # Cache still written even if hash not reported. + assert mocked_write.called + + mocked = mocker.patch.object(session, 'call', wraps=session.call) + session._load_schemas(temporary_valid_schema_cache) + + # No hash reported by server so cache should have been bypassed. 
+ assert mocked.called + + +def test_load_schemas_bypassing_cache( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas bypassing cache when set to False.''' + with mocker.patch.object(session, 'call', wraps=session.call): + + session._load_schemas(temporary_valid_schema_cache) + assert session.call.call_count == 1 + + session._load_schemas(False) + assert session.call.call_count == 2 + + +def test_get_tasks_widget_url(session): + '''Tasks widget URL returns valid HTTP status.''' + url = session.get_widget_url('tasks') + response = requests.get(url) + response.raise_for_status() + + +def test_get_info_widget_url(session, task): + '''Info widget URL for *task* returns valid HTTP status.''' + url = session.get_widget_url('info', entity=task, theme='light') + response = requests.get(url) + response.raise_for_status() + + +def test_encode_media_from_path(session, video_path): + '''Encode media based on a file path.''' + job = session.encode_media(video_path) + + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'output' in job_data + assert 'source_component_id' in job_data + assert 'keep_original' in job_data and job_data['keep_original'] is False + assert len(job_data['output']) + assert 'component_id' in job_data['output'][0] + assert 'format' in job_data['output'][0] + + +def test_encode_media_from_component(session, video_path): + '''Encode media based on a component.''' + location = session.query('Location where name is "ftrack.server"').one() + component = session.create_component( + video_path, + location=location + ) + session.commit() + + job = session.encode_media(component) + + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'keep_original' in job_data and job_data['keep_original'] is True + + +def test_create_sequence_component_with_size(session, temporary_sequence): + '''Create a sequence component and verify that is has a size.''' + location = session.query('Location where 
name is "ftrack.server"').one() + component = session.create_component( + temporary_sequence + ) + + assert component['size'] > 0 + + +def test_plugin_arguments(mocker): + '''Pass plugin arguments to plugin discovery mechanism.''' + mock = mocker.patch( + 'ftrack_api.plugin.discover' + ) + session = ftrack_api.Session( + plugin_paths=[], plugin_arguments={"test": "value"} + ) + assert mock.called + mock.assert_called_once_with([], [session], {"test": "value"}) + +def test_remote_reset(session, new_user): + '''Reset user api key.''' + key_1 = session.reset_remote( + 'api_key', entity=new_user + ) + + key_2 = session.reset_remote( + 'api_key', entity=new_user + ) + + + assert key_1 != key_2 + + +@pytest.mark.parametrize('attribute', [ + ('id',), + ('email',) + +], ids=[ + 'Fail resetting primary key', + 'Fail resetting attribute without default value', +]) +def test_fail_remote_reset(session, user, attribute): + '''Fail trying to rest invalid attributes.''' + + with pytest.raises(ftrack_api.exception.ServerError): + session.reset_remote( + attribute, user + ) + + +def test_close(session): + '''Close session.''' + assert session.closed is False + session.close() + assert session.closed is True + + +def test_close_already_closed_session(session): + '''Close session that is already closed.''' + session.close() + assert session.closed is True + session.close() + assert session.closed is True + + +def test_server_call_after_close(session): + '''Fail to issue calls to server after session closed.''' + session.close() + assert session.closed is True + + with pytest.raises(ftrack_api.exception.ConnectionClosedError): + session.query('User').first() + + +def test_context_manager(session): + '''Use session as context manager.''' + with session: + assert session.closed is False + + assert session.closed is True + + +def test_delayed_job(session): + '''Test the delayed_job action''' + + with pytest.raises(ValueError): + session.delayed_job( + 'DUMMY_JOB' + ) + + 
+@pytest.mark.skip(reason='No configured ldap server.') +def test_delayed_job_ldap_sync(session): + '''Test the a delayed_job ldap sync action''' + result = session.delayed_job( + ftrack_api.symbol.JOB_SYNC_USERS_LDAP + ) + + assert isinstance( + result, ftrack_api.entity.job.Job + ) + + +def test_query_nested_custom_attributes(session, new_asset_version): + '''Query custom attributes nested and update a value and query again. + + This test will query custom attributes via 2 relations, then update the + value in one API session and read it back in another to verify that it gets + the new value. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + # Read the version via a relation in both sessions. + def get_versions(sessions): + versions = [] + for _session in sessions: + asset = _session.query( + 'select versions.custom_attributes from Asset where id is "{0}"'.format( + new_asset_version.get('asset_id') + ) + ).first() + + for version in asset['versions']: + if version.get('id') == new_asset_version.get('id'): + versions.append(version) + + return versions + + # Get version from both sessions. + versions = get_versions((session_one, session_two)) + + # Read attribute for both sessions. + for version in versions: + version['custom_attributes']['versiontest'] + + # Set attribute on session_one. + versions[0]['custom_attributes']['versiontest'] = random.randint( + 0, 99999 + ) + + session.commit() + + # Read version from server for session_two. + session_two_version = get_versions((session_two, ))[0] + + # Verify that value in session 2 is the same as set and committed in + # session 1. + assert ( + session_two_version['custom_attributes']['versiontest'] == + versions[0]['custom_attributes']['versiontest'] + ) + + +def test_query_nested(session): + '''Query components nested and update a value and query again. 
+ + This test will query components via 2 relations, then update the + value in one API session and read it back in another to verify that it gets + the new value. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + query = ( + 'select versions.components.name from Asset where id is ' + '"12939d0c-6766-11e1-8104-f23c91df25eb"' + ) + + def get_version(session): + '''Return the test version from *session*.''' + asset = session.query(query).first() + asset_version = None + for version in asset['versions']: + if version['version'] == 8: + asset_version = version + break + + return asset_version + + asset_version = get_version(session_one) + asset_version2 = get_version(session_two) + + # This assert is not needed, but reading the collections are to ensure they + # are inflated. + assert ( + asset_version2['components'][0]['name'] == + asset_version['components'][0]['name'] + ) + + asset_version['components'][0]['name'] = str(uuid.uuid4()) + + session.commit() + + asset_version2 = get_version(session_two) + + assert ( + asset_version['components'][0]['name'] == + asset_version2['components'][0]['name'] + ) + + +def test_merge_iterations(session, mocker, project): + '''Ensure merge does not happen to many times when querying.''' + mocker.spy(session, '_merge') + + session.query( + 'select status from Task where project_id is {} limit 10'.format( + project['id'] + ) + ).all() + + assert session._merge.call_count < 75 + + +@pytest.mark.parametrize( + 'get_versions', + [ + lambda component, asset_version, asset: component['version']['asset']['versions'], + lambda component, asset_version, asset: asset_version['asset']['versions'], + lambda component, asset_version, asset: asset['versions'], + ], + ids=[ + 'from_component', + 'from_asset_version', + 'from_asset', + ] +) +def test_query_nested2(session, get_versions): + '''Query version.asset.versions from component and then add new version. 
+ + This test will query versions via multiple relations and ensure a new + version appears when added to a different session and then is queried + again. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + # Get a random component that is linked to a version and asset. + component_id = session_two.query( + 'FileComponent where version.asset_id != None' + ).first()['id'] + + query = ( + 'select version.asset.versions from Component where id is "{}"'.format( + component_id + ) + ) + + component = session_one.query(query).one() + asset_version = component['version'] + asset = component['version']['asset'] + versions = component['version']['asset']['versions'] + length = len(versions) + + session_two.create('AssetVersion', { + 'asset_id': asset['id'] + }) + + session_two.commit() + + component = session_one.query(query).one() + versions = get_versions(component, asset_version, asset) + new_length = len(versions) + + assert length + 1 == new_length + + +def test_session_ready_reset_events(mocker): + '''Session ready and reset events.''' + plugin_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') + ) + session = ftrack_api.Session(plugin_paths=[plugin_path]) + + assert session._test_called_events['ftrack.api.session.ready'] is 1 + assert session._test_called_events['ftrack.api.session.reset'] is 0 + + session.reset() + assert session._test_called_events['ftrack.api.session.ready'] is 1 + assert session._test_called_events['ftrack.api.session.reset'] is 1 + + +def test_entity_reference(mocker, session): + '''Return entity reference that uniquely identifies entity.''' + mock_entity = mocker.Mock(entity_type="MockEntityType") + mock_auto_populating = mocker.patch.object(session, "auto_populating") + mock_primary_key = mocker.patch( + "ftrack_api.inspection.primary_key", return_value={"id": "mock-id"} + ) + + reference = session.entity_reference(mock_entity) + + assert reference 
== { + "__entity_type__": "MockEntityType", + "id": "mock-id" + } + + mock_auto_populating.assert_called_once_with(False) + mock_primary_key.assert_called_once_with(mock_entity) + + +def test__entity_reference(mocker, session): + '''Act as alias to entity_reference.''' + mock_entity = mocker.Mock(entity_type="MockEntityType") + mock_entity_reference = mocker.patch.object(session, "entity_reference") + mocker.patch("warnings.warn") + + session._entity_reference(mock_entity) + + mock_entity_reference.assert_called_once_with(mock_entity) + + +def test__entity_reference_issues_deprecation_warning(mocker, session): + '''Issue deprecation warning for usage of _entity_reference.''' + mocker.patch.object(session, "entity_reference") + mock_warn = mocker.patch("warnings.warn") + + session._entity_reference({}) + + mock_warn.assert_called_once_with( + ( + "Session._entity_reference is now available as public method " + "Session.entity_reference. The private method will be removed " + "in version 2.0." 
+ ), + PendingDeprecationWarning + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py new file mode 100644 index 00000000000..cf8b014ee59 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py @@ -0,0 +1,74 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest +import ftrack_api.exception + + +def test_manually_create_multiple_timers_with_error(session, new_user): + '''Fail to create a second timer.''' + session.create('Timer', { + 'user': new_user + }) + + session.commit() + + with pytest.raises(ftrack_api.exception.ServerError): + session.create('Timer', { + 'user': new_user + }) + + session.commit() + + session.reset() + + +def test_create_multiple_timers_with_error(session, new_user): + '''Fail to create a second timer.''' + new_user.start_timer() + + with pytest.raises(ftrack_api.exception.NotUniqueError): + new_user.start_timer() + + session.reset() + + +def test_start_and_stop_a_timer(session, new_user, new_task): + '''Start a new timer and stop it to create a timelog.''' + new_user.start_timer(new_task) + + new_user.stop_timer() + + timelog = session.query( + 'Timelog where context_id = "{0}"'.format(new_task['id']) + ).one() + + assert timelog['user_id'] == new_user['id'], 'User id is correct.' + assert timelog['context_id'] == new_task['id'], 'Task id is correct.' + + +def test_start_a_timer_when_timer_is_running(session, new_user, new_task): + '''Start a timer when an existing timer is already running.''' + new_user.start_timer(new_task) + + # Create the second timer without context. + new_user.start_timer(force=True) + + # There should be only one existing timelog for this user. 
+ timelogs = session.query( + 'Timelog where user_id = "{0}"'.format(new_user['id']) + ).all() + assert len(timelogs) == 1, 'One timelog exists.' + + timelog = session.query( + 'Timer where user_id = "{0}"'.format(new_user['id']) + ).one() + + # Make sure running timer has no context. + assert timelog['context_id'] is None, 'Timer does not have a context.' + + +def test_stop_timer_without_timer_running(session, new_user): + '''Stop a timer when no timer is running.''' + with pytest.raises(ftrack_api.exception.NoResultFoundError): + new_user.stop_timer() diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py similarity index 95% rename from openpype/modules/ftrack/scripts/sub_event_processor.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py index 0d94fa72644..51b45eb93b0 100644 --- a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py @@ -4,8 +4,8 @@ import socket import datetime -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py similarity index 98% rename from openpype/modules/ftrack/scripts/sub_event_status.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_status.py index 24b9bfb789f..8a2733b635d 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py @@ -7,8 +7,8 @@ import datetime import ftrack_api -from openpype.modules.ftrack.ftrack_server.ftrack_server import 
FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StatusEventHub, TOPIC_STATUS_SERVER, diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py similarity index 96% rename from openpype/modules/ftrack/scripts/sub_event_storer.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py index 6e2990ef0be..a8649e0ccce 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py @@ -6,14 +6,14 @@ import pymongo import ftrack_api -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StorerEventHub, TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) -from openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py similarity index 97% rename from openpype/modules/ftrack/scripts/sub_legacy_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py index ae6aefa9085..e3a623c3763 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py @@ -7,7 +7,7 @@ import ftrack_api from openpype.api import Logger from openpype.modules import ModulesManager -from openpype.modules.ftrack.ftrack_server.ftrack_server import 
FtrackServer +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer log = Logger().get_logger("Event Server Legacy") diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py similarity index 93% rename from openpype/modules/ftrack/scripts/sub_user_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_user_server.py index 971a31b703e..a3701a0950d 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py @@ -2,8 +2,8 @@ import signal import socket -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, SocketBaseEventHub ) diff --git a/openpype/modules/ftrack/tray/__init__.py b/openpype/modules/default_modules/ftrack/tray/__init__.py similarity index 100% rename from openpype/modules/ftrack/tray/__init__.py rename to openpype/modules/default_modules/ftrack/tray/__init__.py diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/default_modules/ftrack/tray/ftrack_tray.py similarity index 100% rename from openpype/modules/ftrack/tray/ftrack_tray.py rename to openpype/modules/default_modules/ftrack/tray/ftrack_tray.py diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/default_modules/ftrack/tray/login_dialog.py similarity index 99% rename from openpype/modules/ftrack/tray/login_dialog.py rename to openpype/modules/default_modules/ftrack/tray/login_dialog.py index cc5689bee54..6384621c8e6 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/default_modules/ftrack/tray/login_dialog.py @@ -1,7 +1,7 @@ import os import requests from openpype import style -from 
openpype.modules.ftrack.lib import credentials +from openpype_modules.ftrack.lib import credentials from . import login_tools from openpype import resources from Qt import QtCore, QtGui, QtWidgets diff --git a/openpype/modules/ftrack/tray/login_tools.py b/openpype/modules/default_modules/ftrack/tray/login_tools.py similarity index 100% rename from openpype/modules/ftrack/tray/login_tools.py rename to openpype/modules/default_modules/ftrack/tray/login_tools.py diff --git a/openpype/modules/ftrack/python2_vendor/arrow b/openpype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf728..00000000000 --- a/openpype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab0..00000000000 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 9a66e93b48e037aac95b60d1c24281f369dc74ae Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:21:14 +0200 Subject: [PATCH 44/77] define function for modules directory paths --- openpype/modules/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 877c363f61e..1895281cb88 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -61,6 +61,14 @@ def __getattr__(self, attr_name): return self.__attributes__[attr_name] +def module_dirs(): + current_dir = os.path.abspath(os.path.dirname(__file__)) + dirpaths = [ + os.path.join(current_dir, "default_modules") + ] + return dirpaths + + def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return From 9cdacdf397a8bdbaf379dda17a9ff50d7e6b463e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 
14:46:56 +0200 Subject: [PATCH 45/77] use modified meta class for interface _OpenPypeInterfaceMeta --- openpype/modules/base.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1895281cb88..fc5a29a345a 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -170,7 +170,15 @@ def load_modules(force=False): setattr(openpype_modules, "project_manager_action", project_manager_action) -@six.add_metaclass(ABCMeta) + + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) class OpenPypeInterface: """Base class of Interface that can be used as Mixin with abstract parts. From d2fb85b2358cc46a6cf89bada705f0872a0e8fb6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:47:09 +0200 Subject: [PATCH 46/77] added dictionary access to modules --- openpype/modules/base.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index fc5a29a345a..662a7b02e1f 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -38,6 +38,15 @@ def __iter__(self): def __setattr__(self, attr_name, value): self.__attributes__[attr_name] = value + def __setitem__(self, key, value): + self.__setattr__(key, value) + + def __getitem__(self, key): + return getattr(self, key) + + def get(self, key, default=None): + return self.__attributes__.get(key, default) + def keys(self): return self.__attributes__.keys() From f6d1fd9740446e3fd5c3e23c1293c5ea07ddd768 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:47:43 +0200 Subject: [PATCH 47/77] dynamic loading of modules --- openpype/modules/base.py | 124 +++++++++++++++++++++++---------------- 1 file changed, 72 insertions(+), 52 deletions(-) diff --git a/openpype/modules/base.py 
b/openpype/modules/base.py index 662a7b02e1f..c812bdfc37d 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -70,10 +70,15 @@ def __getattr__(self, attr_name): return self.__attributes__[attr_name] -def module_dirs(): +def get_default_modules_dir(): current_dir = os.path.abspath(os.path.dirname(__file__)) + + return os.path.join(current_dir, "default_modules") + + +def get_module_dirs(): dirpaths = [ - os.path.join(current_dir, "default_modules") + get_default_modules_dir() ] return dirpaths @@ -90,18 +95,24 @@ def load_interfaces(force=False): log = PypeLogger.get_logger("InterfacesLoader") - current_dir = os.path.abspath(os.path.dirname(__file__)) + dirpaths = get_module_dirs() - interface_paths = [ - os.path.join(current_dir, "interfaces.py") - ] + interface_paths = [] + interface_paths.append( + os.path.join(get_default_modules_dir(), "interfaces.py") + ) + for dirpath in dirpaths: + for filename in os.listdir(dirpath): + if filename in ("__pycache__", ): + continue - for filename in os.listdir(current_dir): - full_path = os.path.join(current_dir, filename) - if os.path.isdir(full_path): - interface_paths.append( - os.path.join(full_path, "interfaces.py") - ) + full_path = os.path.join(dirpath, filename) + if not os.path.isdir(full_path): + continue + + interfaces_path = os.path.join(full_path, "interfaces.py") + if os.path.exists(interfaces_path): + interface_paths.append(interfaces_path) # print(interface_paths) for full_path in interface_paths: @@ -131,54 +142,63 @@ def load_interfaces(force=False): def load_modules(force=False): - if not force and "openpype_modules" in sys.modules: - return + # TODO add thread lock + + # First load interfaces + # - modules must not be imported before interfaces + load_interfaces(force) - from openpype.lib import modules_from_path + # Key under which will be modules imported in `sys.modules` + modules_key = "openpype_modules" - sys.modules["openpype_modules"] = openpype_modules = _ModuleClass( - 
"openpype_modules" + # Check if are modules already loaded or no + if not force and modules_key in sys.modules: + return + + # Import helper functions from lib + from openpype.lib import ( + import_filepath, + load_module_from_dirpath ) + # Change `sys.modules` + sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) + log = PypeLogger.get_logger("ModulesLoader") - # TODO import dynamically from defined paths - from . import ( - avalon_apps, - clockify, - deadline, - ftrack, - idle_manager, - log_viewer, - muster, - settings_module, - slack, - sync_server, - timers_manager, - webserver, - launcher_action, - standalonepublish_action, - project_manager_action - ) - setattr(openpype_modules, "avalon_apps", avalon_apps) - setattr(openpype_modules, "clockify", clockify) - setattr(openpype_modules, "deadline", deadline) - setattr(openpype_modules, "ftrack", ftrack) - setattr(openpype_modules, "idle_manager", idle_manager) - setattr(openpype_modules, "log_viewer", log_viewer) - setattr(openpype_modules, "muster", muster) - setattr(openpype_modules, "settings_module", settings_module) - setattr(openpype_modules, "sync_server", sync_server) - setattr(openpype_modules, "slack", slack) - setattr(openpype_modules, "timers_manager", timers_manager) - setattr(openpype_modules, "webserver", webserver) - setattr(openpype_modules, "launcher_action", launcher_action) - setattr( - openpype_modules, "standalonepublish_action", standalonepublish_action - ) - setattr(openpype_modules, "project_manager_action", project_manager_action) + # Look for OpenPype modules in paths defined with `get_module_dirs` + dirpaths = get_module_dirs() + + for dirpath in dirpaths: + if not os.path.exists(dirpath): + log.warning(( + "Could not find path when loading OpenPype modules \"{}\"" + ).format(dirpath)) + continue + + for filename in os.listdir(dirpath): + # Ignore filenames + if filename in ("__pycache__", ): + continue + + fullpath = os.path.join(dirpath, filename) + basename, ext 
= os.path.splitext(filename) + + module = None + # TODO add more logic how to define if folder is module or not + # - check manifest and content of manifest + if os.path.isdir(fullpath): + module = load_module_from_dirpath( + dirpath, filename, modules_key + ) + module_name = filename + elif ext in (".py", ): + module = import_filepath(fullpath) + module_name = basename + if module is not None: + setattr(openpype_modules, module_name, module) class _OpenPypeInterfaceMeta(ABCMeta): From a0e80dec6091b4e4949112911d506eabeafb1f9a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:48:07 +0200 Subject: [PATCH 48/77] skip collect_modules method --- openpype/modules/base.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c812bdfc37d..4dfe382030f 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -305,13 +305,10 @@ def __init__(self, _system_settings=None): self.initialize_modules() self.connect_modules() - def collect_modules(self): - load_interfaces() - load_modules() - def initialize_modules(self): """Import and initialize modules.""" - self.collect_modules() + # Make sure modules are loaded + load_modules() import openpype_modules From 49c649e36d3e9073d9899a22a11b11a515b91ea8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:48:16 +0200 Subject: [PATCH 49/77] added few docstrings --- openpype/modules/base.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 4dfe382030f..f0fb6c91faa 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -18,14 +18,23 @@ # Inherit from `object` for Python 2 hosts class _ModuleClass(object): + """Fake module class for storing OpenPype modules. + + Object of this class can be stored to `sys.modules` and used for storing + dynamically imported modules. 
+ """ def __init__(self, name): # Call setattr on super class super(_ModuleClass, self).__setattr__("name", name) + + # Where modules and interfaces are stored super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): if attr_name not in self.__attributes__: + if attr_name in ("__path__"): + return None raise ImportError("No module named {}.{}".format( self.name, attr_name )) @@ -58,6 +67,12 @@ def items(self): class _InterfacesClass(_ModuleClass): + """Fake module class for storing OpenPype interfaces. + + MissingInterface object is returned if interfaces does not exists. + - this is because interfaces must be available even if are missing + implementation + """ def __getattr__(self, attr_name): if attr_name not in self.__attributes__: # Fake Interface if is not missing @@ -213,11 +228,19 @@ class OpenPypeInterface: This is way how OpenPype module or addon can tell that has implementation for specific part or for other module/addon. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. """ pass class MissingInteface(OpenPypeInterface): + """Class representing missing interface class. + + Used when interface is not available from currently registered paths. 
+ """ pass From 4a5f015f4f939e94f79244eafb32054aa86ae003 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:57:04 +0200 Subject: [PATCH 50/77] renamed function 'load_module_from_dirpath' to 'import_module_from_dirpath' --- openpype/lib/__init__.py | 4 ++-- openpype/lib/python_module_tools.py | 10 +++++----- openpype/modules/base.py | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 52a6024feb7..9bcd0f7587f 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -56,7 +56,7 @@ modules_from_path, recursive_bases_from_class, classes_from_module, - load_module_from_dirpath + import_module_from_dirpath ) from .avalon_context import ( @@ -176,7 +176,7 @@ "modules_from_path", "recursive_bases_from_class", "classes_from_module", - "load_module_from_dirpath", + "import_module_from_dirpath", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 102ae7e71a9..59e7ad9123b 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -136,7 +136,7 @@ def classes_from_module(superclass, module): return classes -def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): +def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): full_module_name = "{}.{}".format(dst_module_name, module_name) if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -152,7 +152,7 @@ def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): return module -def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): +def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): full_module_name = "{}.{}".format(dst_module_name, module_name) if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -179,13 +179,13 @@ def _load_module_from_dirpath_py3(dirpath, 
module_name, dst_module_name): return module -def load_module_from_dirpath(dirpath, folder_name, dst_module_name): +def import_module_from_dirpath(dirpath, folder_name, dst_module_name): if PY3: - module = _load_module_from_dirpath_py3( + module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name ) else: - module = _load_module_from_dirpath_py2( + module = _import_module_from_dirpath_py2( dirpath, folder_name, dst_module_name ) return module diff --git a/openpype/modules/base.py b/openpype/modules/base.py index f0fb6c91faa..fc53d3b27a0 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -173,7 +173,7 @@ def load_modules(force=False): # Import helper functions from lib from openpype.lib import ( import_filepath, - load_module_from_dirpath + import_module_from_dirpath ) # Change `sys.modules` @@ -203,7 +203,7 @@ def load_modules(force=False): # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest if os.path.isdir(fullpath): - module = load_module_from_dirpath( + module = import_module_from_dirpath( dirpath, filename, modules_key ) module_name = filename From 05c6e450f59aae428669e2685d27a9614ab02b88 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:09:59 +0200 Subject: [PATCH 51/77] slighlty modified import function --- openpype/lib/python_module_tools.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 59e7ad9123b..c5849225ce6 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -137,23 +137,33 @@ def classes_from_module(superclass, module): def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, module_name) + dst_module = 
sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None + if full_module_name in sys.modules: return sys.modules[full_module_name] import imp - dst_module = sys.modules[dst_module_name] - fp, pathname, description = imp.find_module(module_name, [dirpath]) module = imp.load_module(full_module_name, fp, pathname, description) - setattr(dst_module, module_name, module) + if dst_module is not None: + setattr(dst_module, module_name, module) return module def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, module_name) + dst_module = sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -179,7 +189,7 @@ def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): return module -def import_module_from_dirpath(dirpath, folder_name, dst_module_name): +def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): if PY3: module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name From da4e4e750152c59b0139fe0b05110ba99e53a86c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:23:14 +0200 Subject: [PATCH 52/77] added docstrings --- openpype/lib/python_module_tools.py | 39 ++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index c5849225ce6..cb5f285ddd3 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -10,6 +10,15 @@ def import_filepath(filepath, module_name=None): + """Import python file as python module. + + Python 2 and Python 3 compatibility. + + Args: + filepath(str): Path to python file. + module_name(str): Name of loaded module. 
Only for Python 3. By default + is filled with filename of filepath. + """ if module_name is None: module_name = os.path.splitext(os.path.basename(filepath))[0] @@ -137,6 +146,7 @@ def classes_from_module(superclass, module): def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using `imp`.""" if dst_module_name: full_module_name = "{}.{}".format(dst_module_name, module_name) dst_module = sys.modules[dst_module_name] @@ -158,38 +168,65 @@ def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using Python 3 modules.""" if dst_module_name: full_module_name = "{}.{}".format(dst_module_name, module_name) dst_module = sys.modules[dst_module_name] else: full_module_name = module_name dst_module = None + + # Skip import if it is already imported if full_module_name in sys.modules: return sys.modules[full_module_name] import importlib.util from importlib._bootstrap_external import PathFinder - dst_module = sys.modules[dst_module_name] + # Find loader for passed path and name loader = PathFinder.find_module(full_module_name, [dirpath]) + # Load specs of module spec = importlib.util.spec_from_loader( full_module_name, loader, origin=dirpath ) + # Create module based on specs module = importlib.util.module_from_spec(spec) + # Store module to destination module and `sys.modules` + # WARNING this must be done before module execution if dst_module is not None: setattr(dst_module, module_name, module) sys.modules[full_module_name] = module + # Execute module import loader.exec_module(module) return module def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): + """Import passed directory as a python module. + + Python 2 and 3 compatible. 
+ + Imported module can be assigned as a child attribute of already loaded + module from `sys.modules` if has support of `setattr`. That is not default + behavior of python modules so parent module must be a custom module with + that ability. + + It is not possible to reimport already cached module. If you need to + reimport module you have to remove it from caches manually. + + Args: + dirpath(str): Parent directory path of loaded folder. + folder_name(str): Folder name which should be imported inside passed + directory. + dst_module_name(str): Parent module name under which can be loaded + module added. + """ if PY3: module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name From 3579a62cb0a9bc422b3738cab1139b313ae8d5a8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:26:14 +0200 Subject: [PATCH 53/77] force to load openpype modules on install --- openpype/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/__init__.py b/openpype/__init__.py index a86d2bc2bea..70d2871468e 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -68,6 +68,10 @@ def patched_discover(superclass): def install(): """Install Pype to Avalon.""" from pyblish.lib import MessageHandler + from openpype.modules import load_modules + + # Make sure modules are loaded + load_modules() def modified_emit(obj, record): """Method replacing `emit` in Pyblish's MessageHandler.""" From 60c0a8adf7656cbf67b4ec528418105c0527f378 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 10:49:58 +0200 Subject: [PATCH 54/77] removed code of submodules --- .../python2_vendor/arrow/.github/FUNDING.yml | 1 - .../.github/ISSUE_TEMPLATE/bug_report.md | 27 - .../.github/ISSUE_TEMPLATE/documentation.md | 17 - .../.github/ISSUE_TEMPLATE/feature_request.md | 17 - .../arrow/.github/pull_request_template.md | 22 - .../workflows/continuous_integration.yml | 123 - .../ftrack/python2_vendor/arrow/.gitignore | 211 - 
.../arrow/.pre-commit-config.yaml | 41 - .../ftrack/python2_vendor/arrow/CHANGELOG.rst | 598 --- .../ftrack/python2_vendor/arrow/LICENSE | 201 - .../ftrack/python2_vendor/arrow/MANIFEST.in | 3 - .../ftrack/python2_vendor/arrow/Makefile | 44 - .../ftrack/python2_vendor/arrow/README.rst | 133 - .../python2_vendor/arrow/arrow/__init__.py | 18 - .../python2_vendor/arrow/arrow/_version.py | 1 - .../ftrack/python2_vendor/arrow/arrow/api.py | 54 - .../python2_vendor/arrow/arrow/arrow.py | 1584 ------ .../python2_vendor/arrow/arrow/constants.py | 9 - .../python2_vendor/arrow/arrow/factory.py | 301 -- .../python2_vendor/arrow/arrow/formatter.py | 139 - .../python2_vendor/arrow/arrow/locales.py | 4267 ----------------- .../python2_vendor/arrow/arrow/parser.py | 596 --- .../ftrack/python2_vendor/arrow/arrow/util.py | 115 - .../ftrack/python2_vendor/arrow/docs/Makefile | 20 - .../ftrack/python2_vendor/arrow/docs/conf.py | 62 - .../python2_vendor/arrow/docs/index.rst | 566 --- .../ftrack/python2_vendor/arrow/docs/make.bat | 35 - .../python2_vendor/arrow/docs/releases.rst | 3 - .../python2_vendor/arrow/requirements.txt | 14 - .../ftrack/python2_vendor/arrow/setup.cfg | 2 - .../ftrack/python2_vendor/arrow/setup.py | 50 - .../python2_vendor/arrow/tests/__init__.py | 0 .../python2_vendor/arrow/tests/conftest.py | 76 - .../python2_vendor/arrow/tests/test_api.py | 28 - .../python2_vendor/arrow/tests/test_arrow.py | 2150 --------- .../arrow/tests/test_factory.py | 390 -- .../arrow/tests/test_formatter.py | 282 -- .../arrow/tests/test_locales.py | 1352 ------ .../python2_vendor/arrow/tests/test_parser.py | 1657 ------- .../python2_vendor/arrow/tests/test_util.py | 81 - .../python2_vendor/arrow/tests/utils.py | 16 - .../ftrack/python2_vendor/arrow/tox.ini | 53 - .../ftrack-python-api/.gitignore | 42 - .../ftrack-python-api/LICENSE.python | 254 - .../ftrack-python-api/LICENSE.txt | 176 - .../ftrack-python-api/MANIFEST.in | 4 - .../ftrack-python-api/README.rst | 34 - 
.../ftrack-python-api/bitbucket-pipelines.yml | 24 - .../ftrack-python-api/doc/_static/ftrack.css | 16 - .../doc/api_reference/accessor/base.rst | 8 - .../doc/api_reference/accessor/disk.rst | 8 - .../doc/api_reference/accessor/index.rst | 14 - .../doc/api_reference/accessor/server.rst | 8 - .../doc/api_reference/attribute.rst | 8 - .../doc/api_reference/cache.rst | 8 - .../doc/api_reference/collection.rst | 8 - .../api_reference/entity/asset_version.rst | 8 - .../doc/api_reference/entity/base.rst | 8 - .../doc/api_reference/entity/component.rst | 8 - .../doc/api_reference/entity/factory.rst | 8 - .../doc/api_reference/entity/index.rst | 14 - .../doc/api_reference/entity/job.rst | 8 - .../doc/api_reference/entity/location.rst | 8 - .../doc/api_reference/entity/note.rst | 8 - .../api_reference/entity/project_schema.rst | 8 - .../doc/api_reference/entity/user.rst | 8 - .../doc/api_reference/event/base.rst | 8 - .../doc/api_reference/event/expression.rst | 8 - .../doc/api_reference/event/hub.rst | 8 - .../doc/api_reference/event/index.rst | 14 - .../doc/api_reference/event/subscriber.rst | 8 - .../doc/api_reference/event/subscription.rst | 8 - .../doc/api_reference/exception.rst | 8 - .../doc/api_reference/formatter.rst | 8 - .../doc/api_reference/index.rst | 20 - .../doc/api_reference/inspection.rst | 8 - .../doc/api_reference/logging.rst | 8 - .../doc/api_reference/operation.rst | 8 - .../doc/api_reference/plugin.rst | 8 - .../doc/api_reference/query.rst | 8 - .../resource_identifier_transformer/base.rst | 10 - .../resource_identifier_transformer/index.rst | 16 - .../doc/api_reference/session.rst | 8 - .../doc/api_reference/structure/base.rst | 8 - .../doc/api_reference/structure/id.rst | 8 - .../doc/api_reference/structure/index.rst | 14 - .../doc/api_reference/structure/origin.rst | 8 - .../doc/api_reference/structure/standard.rst | 8 - .../doc/api_reference/symbol.rst | 8 - .../ftrack-python-api/doc/caching.rst | 175 - .../ftrack-python-api/doc/conf.py | 102 - 
.../ftrack-python-api/doc/docutils.conf | 2 - .../doc/environment_variables.rst | 56 - .../ftrack-python-api/doc/event_list.rst | 137 - .../example/assignments_and_allocations.rst | 82 - .../doc/example/component.rst | 23 - .../doc/example/custom_attribute.rst | 94 - .../doc/example/encode_media.rst | 53 - .../doc/example/entity_links.rst | 56 - .../ftrack-python-api/doc/example/index.rst | 52 - .../doc/example/invite_user.rst | 31 - .../ftrack-python-api/doc/example/job.rst | 97 - .../doc/example/link_attribute.rst | 55 - .../ftrack-python-api/doc/example/list.rst | 46 - .../manage_custom_attribute_configuration.rst | 320 -- .../doc/example/metadata.rst | 43 - .../ftrack-python-api/doc/example/note.rst | 169 - .../ftrack-python-api/doc/example/project.rst | 65 - .../doc/example/publishing.rst | 73 - .../doc/example/review_session.rst | 87 - .../ftrack-python-api/doc/example/scope.rst | 27 - .../doc/example/security_roles.rst | 73 - .../doc/example/sync_ldap_users.rst | 30 - .../doc/example/task_template.rst | 56 - .../doc/example/thumbnail.rst | 71 - .../ftrack-python-api/doc/example/timer.rst | 37 - .../doc/example/web_review.rst | 78 - .../ftrack-python-api/doc/glossary.rst | 76 - .../ftrack-python-api/doc/handling_events.rst | 315 -- .../image/configuring_plugins_directory.png | Bin 7313 -> 0 bytes .../ftrack-python-api/doc/index.rst | 42 - .../ftrack-python-api/doc/installing.rst | 77 - .../ftrack-python-api/doc/introduction.rst | 26 - .../doc/locations/configuring.rst | 87 - .../ftrack-python-api/doc/locations/index.rst | 18 - .../doc/locations/overview.rst | 143 - .../doc/locations/tutorial.rst | 193 - .../ftrack-python-api/doc/querying.rst | 263 - .../ftrack-python-api/doc/release/index.rst | 18 - .../doc/release/migrating_from_old_api.rst | 613 --- .../doc/release/migration.rst | 98 - .../doc/release/release_notes.rst | 1478 ------ .../doc/resource/example_plugin.py | 24 - .../doc/resource/example_plugin_safe.py | 0 
.../resource/example_plugin_using_session.py | 37 - .../doc/security_and_authentication.rst | 38 - .../ftrack-python-api/doc/tutorial.rst | 156 - .../doc/understanding_sessions.rst | 281 -- .../doc/working_with_entities.rst | 434 -- .../ftrack-python-api/pytest.ini | 7 - .../resource/plugin/configure_locations.py | 39 - .../resource/plugin/construct_entity_type.py | 46 - .../ftrack-python-api/setup.cfg | 6 - .../python2_vendor/ftrack-python-api/setup.py | 81 - .../ftrack-python-api/source/__init__.py | 1 - .../source/ftrack_api/__init__.py | 32 - .../_centralized_storage_scenario.py | 656 --- .../source/ftrack_api/_python_ntpath.py | 534 --- .../source/ftrack_api/_version.py | 1 - .../source/ftrack_api/_weakref.py | 66 - .../source/ftrack_api/accessor/__init__.py | 2 - .../source/ftrack_api/accessor/base.py | 124 - .../source/ftrack_api/accessor/disk.py | 250 - .../source/ftrack_api/accessor/server.py | 240 - .../source/ftrack_api/attribute.py | 707 --- .../source/ftrack_api/cache.py | 579 --- .../source/ftrack_api/collection.py | 507 -- .../source/ftrack_api/data.py | 119 - .../source/ftrack_api/entity/__init__.py | 2 - .../source/ftrack_api/entity/asset_version.py | 91 - .../source/ftrack_api/entity/base.py | 402 -- .../source/ftrack_api/entity/component.py | 74 - .../source/ftrack_api/entity/factory.py | 435 -- .../source/ftrack_api/entity/job.py | 48 - .../source/ftrack_api/entity/location.py | 733 --- .../source/ftrack_api/entity/note.py | 105 - .../ftrack_api/entity/project_schema.py | 94 - .../source/ftrack_api/entity/user.py | 123 - .../source/ftrack_api/event/__init__.py | 2 - .../source/ftrack_api/event/base.py | 85 - .../source/ftrack_api/event/expression.py | 282 -- .../source/ftrack_api/event/hub.py | 1091 ----- .../source/ftrack_api/event/subscriber.py | 27 - .../source/ftrack_api/event/subscription.py | 23 - .../source/ftrack_api/exception.py | 392 -- .../source/ftrack_api/formatter.py | 131 - .../source/ftrack_api/inspection.py | 135 - 
.../source/ftrack_api/logging.py | 43 - .../source/ftrack_api/operation.py | 115 - .../source/ftrack_api/plugin.py | 121 - .../source/ftrack_api/query.py | 202 - .../__init__.py | 2 - .../resource_identifier_transformer/base.py | 50 - .../source/ftrack_api/session.py | 2515 ---------- .../source/ftrack_api/structure/__init__.py | 2 - .../source/ftrack_api/structure/base.py | 38 - .../source/ftrack_api/structure/entity_id.py | 12 - .../source/ftrack_api/structure/id.py | 91 - .../source/ftrack_api/structure/origin.py | 28 - .../source/ftrack_api/structure/standard.py | 217 - .../source/ftrack_api/symbol.py | 77 - .../test/fixture/media/colour_wheel.mov | Bin 17627 -> 0 bytes .../test/fixture/media/image-resized-10.png | Bin 115 -> 0 bytes .../test/fixture/media/image.png | Bin 883 -> 0 bytes .../fixture/plugin/configure_locations.py | 40 - .../fixture/plugin/construct_entity_type.py | 52 - .../fixture/plugin/count_session_event.py | 41 - .../ftrack-python-api/test/unit/__init__.py | 2 - .../test/unit/accessor/__init__.py | 2 - .../test/unit/accessor/test_disk.py | 267 -- .../test/unit/accessor/test_server.py | 41 - .../ftrack-python-api/test/unit/conftest.py | 539 --- .../test/unit/entity/__init__.py | 2 - .../test/unit/entity/test_asset_version.py | 54 - .../test/unit/entity/test_base.py | 14 - .../test/unit/entity/test_component.py | 70 - .../test/unit/entity/test_factory.py | 25 - .../test/unit/entity/test_job.py | 42 - .../test/unit/entity/test_location.py | 516 -- .../test/unit/entity/test_metadata.py | 135 - .../test/unit/entity/test_note.py | 67 - .../test/unit/entity/test_project_schema.py | 64 - .../test/unit/entity/test_scopes.py | 24 - .../test/unit/entity/test_user.py | 49 - .../test/unit/event/__init__.py | 2 - .../unit/event/event_hub_server_heartbeat.py | 92 - .../test/unit/event/test_base.py | 36 - .../test/unit/event/test_expression.py | 174 - .../test/unit/event/test_hub.py | 701 --- .../test/unit/event/test_subscriber.py | 33 - 
.../test/unit/event/test_subscription.py | 28 - .../__init__.py | 2 - .../test_base.py | 36 - .../test/unit/structure/__init__.py | 2 - .../test/unit/structure/test_base.py | 31 - .../test/unit/structure/test_entity_id.py | 49 - .../test/unit/structure/test_id.py | 115 - .../test/unit/structure/test_origin.py | 33 - .../test/unit/structure/test_standard.py | 309 -- .../test/unit/test_attribute.py | 146 - .../ftrack-python-api/test/unit/test_cache.py | 416 -- .../test/unit/test_collection.py | 574 --- .../test/unit/test_custom_attribute.py | 251 - .../ftrack-python-api/test/unit/test_data.py | 129 - .../test/unit/test_formatter.py | 70 - .../test/unit/test_inspection.py | 101 - .../test/unit/test_operation.py | 79 - .../test/unit/test_package.py | 48 - .../test/unit/test_plugin.py | 192 - .../ftrack-python-api/test/unit/test_query.py | 164 - .../test/unit/test_session.py | 1519 ------ .../ftrack-python-api/test/unit/test_timer.py | 74 - 242 files changed, 42119 deletions(-) delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst delete 
mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst 
delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py delete 
mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py delete 
mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml deleted file mode 100644 index c3608357a43..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -open_collective: arrow diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index e4e242ee42d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: "🐞 Bug Report" -about: Find a bug? Create a report to help us improve. 
-title: '' -labels: 'bug' -assignees: '' ---- - - - -## Issue Description - - - -## System Info - -- 🖥 **OS name and version**: -- 🐍 **Python version**: -- 🏹 **Arrow version**: diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md deleted file mode 100644 index 753ed0c620a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: "📚 Documentation" -about: Find errors or problems in the docs (https://arrow.readthedocs.io)? -title: '' -labels: 'documentation' -assignees: '' ---- - - - -## Issue Description - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fcab9213f59..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: "💡 Feature Request" -about: Have an idea for a new feature or improvement? -title: '' -labels: 'enhancement' -assignees: '' ---- - - - -## Feature Request - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md deleted file mode 100644 index 0e07c288af5..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md +++ /dev/null @@ -1,22 +0,0 @@ -## Pull Request Checklist - -Thank you for taking the time to improve Arrow! Before submitting your pull request, please check all *appropriate* boxes: - - -- [ ] 🧪 Added **tests** for changed code. 
-- [ ] 🛠️ All tests **pass** when run locally (run `tox` or `make test` to find out!). -- [ ] 🧹 All linting checks **pass** when run locally (run `tox -e lint` or `make lint` to find out!). -- [ ] 📚 Updated **documentation** for changed code. -- [ ] ⏩ Code is **up-to-date** with the `master` branch. - -If you have *any* questions about your code changes or any of the points above, please submit your questions along with the pull request and we will try our best to help! - -## Description of Changes - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml deleted file mode 100644 index d800f399c6f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml +++ /dev/null @@ -1,123 +0,0 @@ -name: tests - -on: - pull_request: # Run on all pull requests - push: # Run only on pushes to master - branches: - - master - schedule: # Run monthly - - cron: "0 0 1 * *" - -jobs: - test: - name: ${{ matrix.os }} (${{ matrix.python-version }}) - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - python-version: ["pypy3", "2.7", "3.5", "3.6", "3.7", "3.8", "3.9-dev"] - os: [ubuntu-latest, macos-latest, windows-latest] - exclude: - # pypy3 randomly fails on Windows builds - - os: windows-latest - python-version: "pypy3" - - steps: - # Check out latest code - - uses: actions/checkout@v2 - - # Configure pip cache - - name: Cache pip (Linux) - uses: actions/cache@v2 - if: startsWith(runner.os, 'Linux') - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Cache pip (macOS) - uses: actions/cache@v2 - if: startsWith(runner.os, 'macOS') - with: - path: ~/Library/Caches/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 
- restore-keys: | - ${{ runner.os }}-pip- - - - name: Cache pip (Windows) - uses: actions/cache@v2 - if: startsWith(runner.os, 'Windows') - with: - path: ~\AppData\Local\pip\Cache - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - # Set up Python - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - # Install dependencies - - name: Install dependencies - run: | - pip install -U pip setuptools wheel - pip install -U tox tox-gh-actions - - # Run tests - - name: Test with tox - run: tox - - # Upload coverage report - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - file: coverage.xml - - lint: - runs-on: ubuntu-latest - - steps: - # Check out latest code - - uses: actions/checkout@v2 - - # Set up Python - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: "3.8" - - # Configure pip cache - - name: Cache pip - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - # Configure pre-commit cache - - name: Cache pre-commit - uses: actions/cache@v2 - with: - path: ~/.cache/pre-commit - key: ${{ runner.os }}-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} - restore-keys: | - ${{ runner.os }}-pre-commit- - - # Install dependencies - - name: Install dependencies - run: | - pip install -U pip setuptools wheel - pip install -U tox - - # Lint code - - name: Lint code - run: tox -e lint - - # Lint docs - - name: Lint docs - run: tox -e docs diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore deleted file mode 100644 index 0448d0cf0c6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore +++ /dev/null @@ -1,211 
+0,0 @@ -README.rst.new - -# Small entry point file for debugging tasks -test.py - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -local/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# Swap -[._]*.s[a-v][a-z] -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim -Sessionx.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - -.idea/ -.vscode/ - -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 
-.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -# Windows thumbnail cache files -Thumbs.db -Thumbs.db:encryptable -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml deleted file mode 100644 index 1f5128595ba..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml +++ /dev/null @@ -1,41 +0,0 @@ -default_language_version: - python: python3 -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: fix-encoding-pragma - exclude: ^arrow/_version.py - - id: requirements-txt-fixer - - id: check-ast - - id: check-yaml - - id: check-case-conflict - - id: check-docstring-first - - id: check-merge-conflict - - id: debug-statements - - repo: https://github.com/timothycrosley/isort - rev: 5.4.2 - hooks: - - id: isort - - repo: https://github.com/asottile/pyupgrade - rev: v2.7.2 - hooks: - - id: pyupgrade - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.6.0 - hooks: - - id: python-no-eval - - id: python-check-blanket-noqa - - id: 
rst-backticks - - repo: https://github.com/psf/black - rev: 20.8b1 - hooks: - - id: black - args: [--safe, --quiet] - - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.3 - hooks: - - id: flake8 - additional_dependencies: [flake8-bugbear] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst deleted file mode 100644 index 0b55a4522c7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst +++ /dev/null @@ -1,598 +0,0 @@ -Changelog -========= - -0.17.0 (2020-10-2) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. This is the last major release to support Python 2.7 and Python 3.5. -- [NEW] Arrow now properly handles imaginary datetimes during DST shifts. For example: - -..code-block:: python - >>> just_before = arrow.get(2013, 3, 31, 1, 55, tzinfo="Europe/Paris") - >>> just_before.shift(minutes=+10) - - -..code-block:: python - >>> before = arrow.get("2018-03-10 23:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") - >>> after = arrow.get("2018-03-11 04:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") - >>> result=[(t, t.to("utc")) for t in arrow.Arrow.range("hour", before, after)] - >>> for r in result: - ... print(r) - ... - (, ) - (, ) - (, ) - (, ) - (, ) - -- [NEW] Added ``humanize`` week granularity translation for Tagalog. -- [CHANGE] Calls to the ``timestamp`` property now emit a ``DeprecationWarning``. In a future release, ``timestamp`` will be changed to a method to align with Python's datetime module. If you would like to continue using the property, please change your code to use the ``int_timestamp`` or ``float_timestamp`` properties instead. -- [CHANGE] Expanded and improved Catalan locale. -- [FIX] Fixed a bug that caused ``Arrow.range()`` to incorrectly cut off ranges in certain scenarios when using month, quarter, or year endings. 
-- [FIX] Fixed a bug that caused day of week token parsing to be case sensitive. -- [INTERNAL] A number of functions were reordered in arrow.py for better organization and grouping of related methods. This change will have no impact on usage. -- [INTERNAL] A minimum tox version is now enforced for compatibility reasons. Contributors must use tox >3.18.0 going forward. - -0.16.0 (2020-08-23) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.16.x and 0.17.x releases are the last to support Python 2.7 and 3.5. -- [NEW] Implemented `PEP 495 `_ to handle ambiguous datetimes. This is achieved by the addition of the ``fold`` attribute for Arrow objects. For example: - -.. code-block:: python - - >>> before = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm') - - >>> before.fold - 0 - >>> before.ambiguous - True - >>> after = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm', fold=1) - - >>> after = before.replace(fold=1) - - -- [NEW] Added ``normalize_whitespace`` flag to ``arrow.get``. This is useful for parsing log files and/or any files that may contain inconsistent spacing. For example: - -.. code-block:: python - - >>> arrow.get("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True) - - >>> arrow.get("2013-036 \t 04:05:06Z", normalize_whitespace=True) - - -0.15.8 (2020-07-23) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.15.x, 0.16.x, and 0.17.x releases are the last to support Python 2.7 and 3.5. -- [NEW] Added ``humanize`` week granularity translation for Czech. -- [FIX] ``arrow.get`` will now pick sane defaults when weekdays are passed with particular token combinations, see `#446 `_. -- [INTERNAL] Moved arrow to an organization. The repo can now be found `here `_. -- [INTERNAL] Started issuing deprecation warnings for Python 2.7 and 3.5. -- [INTERNAL] Added Python 3.9 to CI pipeline. 
- -0.15.7 (2020-06-19) -------------------- - -- [NEW] Added a number of built-in format strings. See the `docs `_ for a complete list of supported formats. For example: - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw.format(arrow.FORMAT_COOKIE) - 'Wednesday, 27-May-2020 10:30:35 UTC' - -- [NEW] Arrow is now fully compatible with Python 3.9 and PyPy3. -- [NEW] Added Makefile, tox.ini, and requirements.txt files to the distribution bundle. -- [NEW] Added French Canadian and Swahili locales. -- [NEW] Added ``humanize`` week granularity translation for Hebrew, Greek, Macedonian, Swedish, Slovak. -- [FIX] ms and μs timestamps are now normalized in ``arrow.get()``, ``arrow.fromtimestamp()``, and ``arrow.utcfromtimestamp()``. For example: - -.. code-block:: python - - >>> ts = 1591161115194556 - >>> arw = arrow.get(ts) - - >>> arw.timestamp - 1591161115 - -- [FIX] Refactored and updated Macedonian, Hebrew, Korean, and Portuguese locales. - -0.15.6 (2020-04-29) -------------------- - -- [NEW] Added support for parsing and formatting `ISO 8601 week dates `_ via a new token ``W``, for example: - -.. code-block:: python - - >>> arrow.get("2013-W29-6", "W") - - >>> utc=arrow.utcnow() - >>> utc - - >>> utc.format("W") - '2020-W04-4' - -- [NEW] Formatting with ``x`` token (microseconds) is now possible, for example: - -.. code-block:: python - - >>> dt = arrow.utcnow() - >>> dt.format("x") - '1585669870688329' - >>> dt.format("X") - '1585669870' - -- [NEW] Added ``humanize`` week granularity translation for German, Italian, Polish & Taiwanese locales. -- [FIX] Consolidated and simplified German locales. -- [INTERNAL] Moved testing suite from nosetest/Chai to pytest/pytest-mock. -- [INTERNAL] Converted xunit-style setup and teardown functions in tests to pytest fixtures. -- [INTERNAL] Setup Github Actions for CI alongside Travis. -- [INTERNAL] Help support Arrow's future development by donating to the project on `Open Collective `_. 
- -0.15.5 (2020-01-03) -------------------- - -- [WARN] Python 2 reached EOL on 2020-01-01. arrow will **drop support** for Python 2 in a future release to be decided (see `#739 `_). -- [NEW] Added bounds parameter to ``span_range``, ``interval`` and ``span`` methods. This allows you to include or exclude the start and end values. -- [NEW] ``arrow.get()`` can now create arrow objects from a timestamp with a timezone, for example: - -.. code-block:: python - - >>> arrow.get(1367900664, tzinfo=tz.gettz('US/Pacific')) - - -- [NEW] ``humanize`` can now combine multiple levels of granularity, for example: - -.. code-block:: python - - >>> later140 = arrow.utcnow().shift(seconds=+8400) - >>> later140.humanize(granularity="minute") - 'in 139 minutes' - >>> later140.humanize(granularity=["hour", "minute"]) - 'in 2 hours and 19 minutes' - -- [NEW] Added Hong Kong locale (``zh_hk``). -- [NEW] Added ``humanize`` week granularity translation for Dutch. -- [NEW] Numbers are now displayed when using the seconds granularity in ``humanize``. -- [CHANGE] ``range`` now supports both the singular and plural forms of the ``frames`` argument (e.g. day and days). -- [FIX] Improved parsing of strings that contain punctuation. -- [FIX] Improved behaviour of ``humanize`` when singular seconds are involved. - -0.15.4 (2019-11-02) -------------------- - -- [FIX] Fixed an issue that caused package installs to fail on Conda Forge. - -0.15.3 (2019-11-02) -------------------- - -- [NEW] ``factory.get()`` can now create arrow objects from a ISO calendar tuple, for example: - -.. code-block:: python - - >>> arrow.get((2013, 18, 7)) - - -- [NEW] Added a new token ``x`` to allow parsing of integer timestamps with milliseconds and microseconds. -- [NEW] Formatting now supports escaping of characters using the same syntax as parsing, for example: - -.. 
code-block:: python - - >>> arw = arrow.now() - >>> fmt = "YYYY-MM-DD h [h] m" - >>> arw.format(fmt) - '2019-11-02 3 h 32' - -- [NEW] Added ``humanize`` week granularity translations for Chinese, Spanish and Vietnamese. -- [CHANGE] Added ``ParserError`` to module exports. -- [FIX] Added support for midnight at end of day. See `#703 `_ for details. -- [INTERNAL] Created Travis build for macOS. -- [INTERNAL] Test parsing and formatting against full timezone database. - -0.15.2 (2019-09-14) -------------------- - -- [NEW] Added ``humanize`` week granularity translations for Portuguese and Brazilian Portuguese. -- [NEW] Embedded changelog within docs and added release dates to versions. -- [FIX] Fixed a bug that caused test failures on Windows only, see `#668 `_ for details. - -0.15.1 (2019-09-10) -------------------- - -- [NEW] Added ``humanize`` week granularity translations for Japanese. -- [FIX] Fixed a bug that caused Arrow to fail when passed a negative timestamp string. -- [FIX] Fixed a bug that caused Arrow to fail when passed a datetime object with ``tzinfo`` of type ``StaticTzInfo``. - -0.15.0 (2019-09-08) -------------------- - -- [NEW] Added support for DDD and DDDD ordinal date tokens. The following functionality is now possible: ``arrow.get("1998-045")``, ``arrow.get("1998-45", "YYYY-DDD")``, ``arrow.get("1998-045", "YYYY-DDDD")``. -- [NEW] ISO 8601 basic format for dates and times is now supported (e.g. ``YYYYMMDDTHHmmssZ``). -- [NEW] Added ``humanize`` week granularity translations for French, Russian and Swiss German locales. -- [CHANGE] Timestamps of type ``str`` are no longer supported **without a format string** in the ``arrow.get()`` method. This change was made to support the ISO 8601 basic format and to address bugs such as `#447 `_. - -The following will NOT work in v0.15.0: - -.. code-block:: python - - >>> arrow.get("1565358758") - >>> arrow.get("1565358758.123413") - -The following will work in v0.15.0: - -.. 
code-block:: python - - >>> arrow.get("1565358758", "X") - >>> arrow.get("1565358758.123413", "X") - >>> arrow.get(1565358758) - >>> arrow.get(1565358758.123413) - -- [CHANGE] When a meridian token (a|A) is passed and no meridians are available for the specified locale (e.g. unsupported or untranslated) a ``ParserError`` is raised. -- [CHANGE] The timestamp token (``X``) will now match float timestamps of type ``str``: ``arrow.get(“1565358758.123415”, “X”)``. -- [CHANGE] Strings with leading and/or trailing whitespace will no longer be parsed without a format string. Please see `the docs `_ for ways to handle this. -- [FIX] The timestamp token (``X``) will now only match on strings that **strictly contain integers and floats**, preventing incorrect matches. -- [FIX] Most instances of ``arrow.get()`` returning an incorrect ``Arrow`` object from a partial parsing match have been eliminated. The following issue have been addressed: `#91 `_, `#196 `_, `#396 `_, `#434 `_, `#447 `_, `#456 `_, `#519 `_, `#538 `_, `#560 `_. - -0.14.7 (2019-09-04) -------------------- - -- [CHANGE] ``ArrowParseWarning`` will no longer be printed on every call to ``arrow.get()`` with a datetime string. The purpose of the warning was to start a conversation about the upcoming 0.15.0 changes and we appreciate all the feedback that the community has given us! - -0.14.6 (2019-08-28) -------------------- - -- [NEW] Added support for ``week`` granularity in ``Arrow.humanize()``. For example, ``arrow.utcnow().shift(weeks=-1).humanize(granularity="week")`` outputs "a week ago". This change introduced two new untranslated words, ``week`` and ``weeks``, to all locale dictionaries, so locale contributions are welcome! -- [NEW] Fully translated the Brazilian Portugese locale. -- [CHANGE] Updated the Macedonian locale to inherit from a Slavic base. -- [FIX] Fixed a bug that caused ``arrow.get()`` to ignore tzinfo arguments of type string (e.g. ``arrow.get(tzinfo="Europe/Paris")``). 
-- [FIX] Fixed a bug that occurred when ``arrow.Arrow()`` was instantiated with a ``pytz`` tzinfo object. -- [FIX] Fixed a bug that caused Arrow to fail when passed a sub-second token, that when rounded, had a value greater than 999999 (e.g. ``arrow.get("2015-01-12T01:13:15.9999995")``). Arrow should now accurately propagate the rounding for large sub-second tokens. - -0.14.5 (2019-08-09) -------------------- - -- [NEW] Added Afrikaans locale. -- [CHANGE] Removed deprecated ``replace`` shift functionality. Users looking to pass plural properties to the ``replace`` function to shift values should use ``shift`` instead. -- [FIX] Fixed bug that occurred when ``factory.get()`` was passed a locale kwarg. - -0.14.4 (2019-07-30) -------------------- - -- [FIX] Fixed a regression in 0.14.3 that prevented a tzinfo argument of type string to be passed to the ``get()`` function. Functionality such as ``arrow.get("2019072807", "YYYYMMDDHH", tzinfo="UTC")`` should work as normal again. -- [CHANGE] Moved ``backports.functools_lru_cache`` dependency from ``extra_requires`` to ``install_requires`` for ``Python 2.7`` installs to fix `#495 `_. - -0.14.3 (2019-07-28) -------------------- - -- [NEW] Added full support for Python 3.8. -- [CHANGE] Added warnings for upcoming factory.get() parsing changes in 0.15.0. Please see `#612 `_ for full details. -- [FIX] Extensive refactor and update of documentation. -- [FIX] factory.get() can now construct from kwargs. -- [FIX] Added meridians to Spanish Locale. - -0.14.2 (2019-06-06) -------------------- - -- [CHANGE] Travis CI builds now use tox to lint and run tests. -- [FIX] Fixed UnicodeDecodeError on certain locales (#600). - -0.14.1 (2019-06-06) -------------------- - -- [FIX] Fixed ``ImportError: No module named 'dateutil'`` (#598). - -0.14.0 (2019-06-06) -------------------- - -- [NEW] Added provisional support for Python 3.8. -- [CHANGE] Removed support for EOL Python 3.4. -- [FIX] Updated setup.py with modern Python standards. 
-- [FIX] Upgraded dependencies to latest versions. -- [FIX] Enabled flake8 and black on travis builds. -- [FIX] Formatted code using black and isort. - -0.13.2 (2019-05-30) -------------------- - -- [NEW] Add is_between method. -- [FIX] Improved humanize behaviour for near zero durations (#416). -- [FIX] Correct humanize behaviour with future days (#541). -- [FIX] Documentation updates. -- [FIX] Improvements to German Locale. - -0.13.1 (2019-02-17) -------------------- - -- [NEW] Add support for Python 3.7. -- [CHANGE] Remove deprecation decorators for Arrow.range(), Arrow.span_range() and Arrow.interval(), all now return generators, wrap with list() to get old behavior. -- [FIX] Documentation and docstring updates. - -0.13.0 (2019-01-09) -------------------- - -- [NEW] Added support for Python 3.6. -- [CHANGE] Drop support for Python 2.6/3.3. -- [CHANGE] Return generator instead of list for Arrow.range(), Arrow.span_range() and Arrow.interval(). -- [FIX] Make arrow.get() work with str & tzinfo combo. -- [FIX] Make sure special RegEx characters are escaped in format string. -- [NEW] Added support for ZZZ when formatting. -- [FIX] Stop using datetime.utcnow() in internals, use datetime.now(UTC) instead. -- [FIX] Return NotImplemented instead of TypeError in arrow math internals. -- [NEW] Added Estonian Locale. -- [FIX] Small fixes to Greek locale. -- [FIX] TagalogLocale improvements. -- [FIX] Added test requirements to setup. -- [FIX] Improve docs for get, now and utcnow methods. -- [FIX] Correct typo in depreciation warning. - -0.12.1 ------- - -- [FIX] Allow universal wheels to be generated and reliably installed. -- [FIX] Make humanize respect only_distance when granularity argument is also given. 
- -0.12.0 ------- - -- [FIX] Compatibility fix for Python 2.x - -0.11.0 ------- - -- [FIX] Fix grammar of ArabicLocale -- [NEW] Add Nepali Locale -- [FIX] Fix month name + rename AustriaLocale -> AustrianLocale -- [FIX] Fix typo in Basque Locale -- [FIX] Fix grammar in PortugueseBrazilian locale -- [FIX] Remove pip --user-mirrors flag -- [NEW] Add Indonesian Locale - -0.10.0 ------- - -- [FIX] Fix getattr off by one for quarter -- [FIX] Fix negative offset for UTC -- [FIX] Update arrow.py - -0.9.0 ------ - -- [NEW] Remove duplicate code -- [NEW] Support gnu date iso 8601 -- [NEW] Add support for universal wheels -- [NEW] Slovenian locale -- [NEW] Slovak locale -- [NEW] Romanian locale -- [FIX] respect limit even if end is defined range -- [FIX] Separate replace & shift functions -- [NEW] Added tox -- [FIX] Fix supported Python versions in documentation -- [NEW] Azerbaijani locale added, locale issue fixed in Turkish. -- [FIX] Format ParserError's raise message - -0.8.0 ------ - -- [] - -0.7.1 ------ - -- [NEW] Esperanto locale (batisteo) - -0.7.0 ------ - -- [FIX] Parse localized strings #228 (swistakm) -- [FIX] Modify tzinfo parameter in ``get`` api #221 (bottleimp) -- [FIX] Fix Czech locale (PrehistoricTeam) -- [FIX] Raise TypeError when adding/subtracting non-dates (itsmeolivia) -- [FIX] Fix pytz conversion error (Kudo) -- [FIX] Fix overzealous time truncation in span_range (kdeldycke) -- [NEW] Humanize for time duration #232 (ybrs) -- [NEW] Add Thai locale (sipp11) -- [NEW] Adding Belarusian (be) locale (oire) -- [NEW] Search date in strings (beenje) -- [NEW] Note that arrow's tokens differ from strptime's. (offby1) - -0.6.0 ------ - -- [FIX] Added support for Python 3 -- [FIX] Avoid truncating oversized epoch timestamps. Fixes #216. 
-- [FIX] Fixed month abbreviations for Ukrainian -- [FIX] Fix typo timezone -- [FIX] A couple of dialect fixes and two new languages -- [FIX] Spanish locale: ``Miercoles`` should have acute accent -- [Fix] Fix Finnish grammar -- [FIX] Fix typo in 'Arrow.floor' docstring -- [FIX] Use read() utility to open README -- [FIX] span_range for week frame -- [NEW] Add minimal support for fractional seconds longer than six digits. -- [NEW] Adding locale support for Marathi (mr) -- [NEW] Add count argument to span method -- [NEW] Improved docs - -0.5.1 - 0.5.4 -------------- - -- [FIX] test the behavior of simplejson instead of calling for_json directly (tonyseek) -- [FIX] Add Hebrew Locale (doodyparizada) -- [FIX] Update documentation location (andrewelkins) -- [FIX] Update setup.py Development Status level (andrewelkins) -- [FIX] Case insensitive month match (cshowe) - -0.5.0 ------ - -- [NEW] struct_time addition. (mhworth) -- [NEW] Version grep (eirnym) -- [NEW] Default to ISO 8601 format (emonty) -- [NEW] Raise TypeError on comparison (sniekamp) -- [NEW] Adding Macedonian(mk) locale (krisfremen) -- [FIX] Fix for ISO seconds and fractional seconds (sdispater) (andrewelkins) -- [FIX] Use correct Dutch wording for "hours" (wbolster) -- [FIX] Complete the list of english locales (indorilftw) -- [FIX] Change README to reStructuredText (nyuszika7h) -- [FIX] Parse lower-cased 'h' (tamentis) -- [FIX] Slight modifications to Dutch locale (nvie) - -0.4.4 ------ - -- [NEW] Include the docs in the released tarball -- [NEW] Czech localization Czech localization for Arrow -- [NEW] Add fa_ir to locales -- [FIX] Fixes parsing of time strings with a final Z -- [FIX] Fixes ISO parsing and formatting for fractional seconds -- [FIX] test_fromtimestamp sp -- [FIX] some typos fixed -- [FIX] removed an unused import statement -- [FIX] docs table fix -- [FIX] Issue with specify 'X' template and no template at all to arrow.get -- [FIX] Fix "import" typo in docs/index.rst -- [FIX] Fix unit tests 
for zero passed -- [FIX] Update layout.html -- [FIX] In Norwegian and new Norwegian months and weekdays should not be capitalized -- [FIX] Fixed discrepancy between specifying 'X' to arrow.get and specifying no template - -0.4.3 ------ - -- [NEW] Turkish locale (Emre) -- [NEW] Arabic locale (Mosab Ahmad) -- [NEW] Danish locale (Holmars) -- [NEW] Icelandic locale (Holmars) -- [NEW] Hindi locale (Atmb4u) -- [NEW] Malayalam locale (Atmb4u) -- [NEW] Finnish locale (Stormpat) -- [NEW] Portuguese locale (Danielcorreia) -- [NEW] ``h`` and ``hh`` strings are now supported (Averyonghub) -- [FIX] An incorrect inflection in the Polish locale has been fixed (Avalanchy) -- [FIX] ``arrow.get`` now properly handles ``Date`` (Jaapz) -- [FIX] Tests are now declared in ``setup.py`` and the manifest (Pypingou) -- [FIX] ``__version__`` has been added to ``__init__.py`` (Sametmax) -- [FIX] ISO 8601 strings can be parsed without a separator (Ivandiguisto / Root) -- [FIX] Documentation is now more clear regarding some inputs on ``arrow.get`` (Eriktaubeneck) -- [FIX] Some documentation links have been fixed (Vrutsky) -- [FIX] Error messages for parse errors are now more descriptive (Maciej Albin) -- [FIX] The parser now correctly checks for separators in strings (Mschwager) - -0.4.2 ------ - -- [NEW] Factory ``get`` method now accepts a single ``Arrow`` argument. -- [NEW] Tokens SSSS, SSSSS and SSSSSS are supported in parsing. -- [NEW] ``Arrow`` objects have a ``float_timestamp`` property. 
-- [NEW] Vietnamese locale (Iu1nguoi) -- [NEW] Factory ``get`` method now accepts a list of format strings (Dgilland) -- [NEW] A MANIFEST.in file has been added (Pypingou) -- [NEW] Tests can be run directly from ``setup.py`` (Pypingou) -- [FIX] Arrow docs now list 'day of week' format tokens correctly (Rudolphfroger) -- [FIX] Several issues with the Korean locale have been resolved (Yoloseem) -- [FIX] ``humanize`` now correctly returns unicode (Shvechikov) -- [FIX] ``Arrow`` objects now pickle / unpickle correctly (Yoloseem) - -0.4.1 ------ - -- [NEW] Table / explanation of formatting & parsing tokens in docs -- [NEW] Brazilian locale (Augusto2112) -- [NEW] Dutch locale (OrangeTux) -- [NEW] Italian locale (Pertux) -- [NEW] Austrain locale (LeChewbacca) -- [NEW] Tagalog locale (Marksteve) -- [FIX] Corrected spelling and day numbers in German locale (LeChewbacca) -- [FIX] Factory ``get`` method should now handle unicode strings correctly (Bwells) -- [FIX] Midnight and noon should now parse and format correctly (Bwells) - -0.4.0 ------ - -- [NEW] Format-free ISO 8601 parsing in factory ``get`` method -- [NEW] Support for 'week' / 'weeks' in ``span``, ``range``, ``span_range``, ``floor`` and ``ceil`` -- [NEW] Support for 'weeks' in ``replace`` -- [NEW] Norwegian locale (Martinp) -- [NEW] Japanese locale (CortYuming) -- [FIX] Timezones no longer show the wrong sign when formatted (Bean) -- [FIX] Microseconds are parsed correctly from strings (Bsidhom) -- [FIX] Locale day-of-week is no longer off by one (Cynddl) -- [FIX] Corrected plurals of Ukrainian and Russian nouns (Catchagain) -- [CHANGE] Old 0.1 ``arrow`` module method removed -- [CHANGE] Dropped timestamp support in ``range`` and ``span_range`` (never worked correctly) -- [CHANGE] Dropped parsing of single string as tz string in factory ``get`` method (replaced by ISO 8601) - -0.3.5 ------ - -- [NEW] French locale (Cynddl) -- [NEW] Spanish locale (Slapresta) -- [FIX] Ranges handle multiple timezones correctly 
(Ftobia) - -0.3.4 ------ - -- [FIX] Humanize no longer sometimes returns the wrong month delta -- [FIX] ``__format__`` works correctly with no format string - -0.3.3 ------ - -- [NEW] Python 2.6 support -- [NEW] Initial support for locale-based parsing and formatting -- [NEW] ArrowFactory class, now proxied as the module API -- [NEW] ``factory`` api method to obtain a factory for a custom type -- [FIX] Python 3 support and tests completely ironed out - -0.3.2 ------ - -- [NEW] Python 3+ support - -0.3.1 ------ - -- [FIX] The old ``arrow`` module function handles timestamps correctly as it used to - -0.3.0 ------ - -- [NEW] ``Arrow.replace`` method -- [NEW] Accept timestamps, datetimes and Arrows for datetime inputs, where reasonable -- [FIX] ``range`` and ``span_range`` respect end and limit parameters correctly -- [CHANGE] Arrow objects are no longer mutable -- [CHANGE] Plural attribute name semantics altered: single -> absolute, plural -> relative -- [CHANGE] Plural names no longer supported as properties (e.g. ``arrow.utcnow().years``) - -0.2.1 ------ - -- [NEW] Support for localized humanization -- [NEW] English, Russian, Greek, Korean, Chinese locales - -0.2.0 ------ - -- **REWRITE** -- [NEW] Date parsing -- [NEW] Date formatting -- [NEW] ``floor``, ``ceil`` and ``span`` methods -- [NEW] ``datetime`` interface implementation -- [NEW] ``clone`` method -- [NEW] ``get``, ``now`` and ``utcnow`` API methods - -0.1.6 ------ - -- [NEW] Humanized time deltas -- [NEW] ``__eq__`` implemented -- [FIX] Issues with conversions related to daylight savings time resolved -- [CHANGE] ``__str__`` uses ISO formatting - -0.1.5 ------ - -- **Started tracking changes** -- [NEW] Parsing of ISO-formatted time zone offsets (e.g. 
'+02:30', '-05:00') -- [NEW] Resolved some issues with timestamps and delta / Olson time zones diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE deleted file mode 100644 index 2bef500de74..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright 2019 Chris Smith - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in deleted file mode 100644 index d9955ed96ae..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include LICENSE CHANGELOG.rst README.rst Makefile requirements.txt tox.ini -recursive-include tests *.py -recursive-include docs *.py *.rst *.bat Makefile diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile deleted file mode 100644 index f294985dc61..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -.PHONY: auto test docs clean - -auto: build38 - -build27: PYTHON_VER = python2.7 -build35: PYTHON_VER = python3.5 -build36: PYTHON_VER = python3.6 -build37: PYTHON_VER = python3.7 -build38: PYTHON_VER = python3.8 -build39: PYTHON_VER = python3.9 - -build27 build35 build36 build37 build38 build39: clean - virtualenv venv --python=$(PYTHON_VER) - . venv/bin/activate; \ - pip install -r requirements.txt; \ - pre-commit install - -test: - rm -f .coverage coverage.xml - . venv/bin/activate; pytest - -lint: - . venv/bin/activate; pre-commit run --all-files --show-diff-on-failure - -docs: - rm -rf docs/_build - . 
venv/bin/activate; cd docs; make html - -clean: clean-dist - rm -rf venv .pytest_cache ./**/__pycache__ - rm -f .coverage coverage.xml ./**/*.pyc - -clean-dist: - rm -rf dist build .egg .eggs arrow.egg-info - -build-dist: - . venv/bin/activate; \ - pip install -U setuptools twine wheel; \ - python setup.py sdist bdist_wheel - -upload-dist: - . venv/bin/activate; twine upload dist/* - -publish: test clean-dist build-dist upload-dist clean-dist diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst deleted file mode 100644 index 69f6c50d813..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst +++ /dev/null @@ -1,133 +0,0 @@ -Arrow: Better dates & times for Python -====================================== - -.. start-inclusion-marker-do-not-remove - -.. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master - :alt: Build Status - :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster - -.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg - :alt: Coverage - :target: https://codecov.io/gh/arrow-py/arrow - -.. image:: https://img.shields.io/pypi/v/arrow.svg - :alt: PyPI Version - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/pypi/pyversions/arrow.svg - :alt: Supported Python Versions - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/pypi/l/arrow.svg - :alt: License - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :alt: Code Style: Black - :target: https://github.com/psf/black - - -**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. 
It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. - -Arrow is named after the `arrow of time `_ and is heavily inspired by `moment.js `_ and `requests `_. - -Why use Arrow over built-in modules? ------------------------------------- - -Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective: - -- Too many modules: datetime, time, calendar, dateutil, pytz and more -- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. -- Timezones and timestamp conversions are verbose and unpleasant -- Timezone naivety is the norm -- Gaps in functionality: ISO 8601 parsing, timespans, humanization - -Features --------- - -- Fully-implemented, drop-in replacement for datetime -- Supports Python 2.7, 3.5, 3.6, 3.7, 3.8 and 3.9 -- Timezone-aware and UTC by default -- Provides super-simple creation options for many common input scenarios -- :code:`shift` method with support for relative offsets, including weeks -- Formats and parses strings automatically -- Wide support for ISO 8601 -- Timezone conversion -- Timestamp available as a property -- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year -- Humanizes and supports a growing list of contributed locales -- Extensible for your own Arrow-derived types - -Quick Start ------------ - -Installation -~~~~~~~~~~~~ - -To install Arrow, use `pip `_ or `pipenv `_: - -.. code-block:: console - - $ pip install -U arrow - -Example Usage -~~~~~~~~~~~~~ - -.. 
code-block:: python - - >>> import arrow - >>> arrow.get('2013-05-11T21:23:58.970460+07:00') - - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc = utc.shift(hours=-1) - >>> utc - - - >>> local = utc.to('US/Pacific') - >>> local - - - >>> local.timestamp - 1368303838 - - >>> local.format() - '2013-05-11 13:23:58 -07:00' - - >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-11 13:23:58 -07:00' - - >>> local.humanize() - 'an hour ago' - - >>> local.humanize(locale='ko_kr') - '1시간 전' - -.. end-inclusion-marker-do-not-remove - -Documentation -------------- - -For full documentation, please visit `arrow.readthedocs.io `_. - -Contributing ------------- - -Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing: - -#. Find an issue or feature to tackle on the `issue tracker `_. Issues marked with the `"good first issue" label `_ may be a great place to start! -#. Fork `this repository `_ on GitHub and begin making changes in a branch. -#. Add a few tests to ensure that the bug was fixed or the feature works as expected. -#. Run the entire test suite and linting checks by running one of the following commands: :code:`tox` (if you have `tox `_ installed) **OR** :code:`make build38 && make test && make lint` (if you do not have Python 3.8 installed, replace :code:`build38` with the latest Python version on your system). -#. Submit a pull request and await feedback 😃. - -If you have any questions along the way, feel free to ask them `here `_. - -Support Arrow -------------- - -`Open Collective `_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective `_. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py deleted file mode 100644 index 2883527be89..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -from ._version import __version__ -from .api import get, now, utcnow -from .arrow import Arrow -from .factory import ArrowFactory -from .formatter import ( - FORMAT_ATOM, - FORMAT_COOKIE, - FORMAT_RFC822, - FORMAT_RFC850, - FORMAT_RFC1036, - FORMAT_RFC1123, - FORMAT_RFC2822, - FORMAT_RFC3339, - FORMAT_RSS, - FORMAT_W3C, -) -from .parser import ParserError diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py deleted file mode 100644 index fd86b3ee915..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.17.0" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py deleted file mode 100644 index a6b7be3de28..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Provides the default implementation of :class:`ArrowFactory ` -methods for use as a module API. - -""" - -from __future__ import absolute_import - -from arrow.factory import ArrowFactory - -# internal default factory. 
-_factory = ArrowFactory() - - -def get(*args, **kwargs): - """Calls the default :class:`ArrowFactory ` ``get`` method.""" - - return _factory.get(*args, **kwargs) - - -get.__doc__ = _factory.get.__doc__ - - -def utcnow(): - """Calls the default :class:`ArrowFactory ` ``utcnow`` method.""" - - return _factory.utcnow() - - -utcnow.__doc__ = _factory.utcnow.__doc__ - - -def now(tz=None): - """Calls the default :class:`ArrowFactory ` ``now`` method.""" - - return _factory.now(tz) - - -now.__doc__ = _factory.now.__doc__ - - -def factory(type): - """Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` - or derived type. - - :param type: the type, :class:`Arrow ` or derived. - - """ - - return ArrowFactory(type) - - -__all__ = ["get", "utcnow", "now", "factory"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py deleted file mode 100644 index 4fe95417891..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py +++ /dev/null @@ -1,1584 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Provides the :class:`Arrow ` class, an enhanced ``datetime`` -replacement. - -""" - -from __future__ import absolute_import - -import calendar -import sys -import warnings -from datetime import datetime, timedelta -from datetime import tzinfo as dt_tzinfo -from math import trunc - -from dateutil import tz as dateutil_tz -from dateutil.relativedelta import relativedelta - -from arrow import formatter, locales, parser, util - -if sys.version_info[:2] < (3, 6): # pragma: no cover - with warnings.catch_warnings(): - warnings.simplefilter("default", DeprecationWarning) - warnings.warn( - "Arrow will drop support for Python 2.7 and 3.5 in the upcoming v1.0.0 release. Please upgrade to " - "Python 3.6+ to continue receiving updates for Arrow.", - DeprecationWarning, - ) - - -class Arrow(object): - """An :class:`Arrow ` object. 
- - Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing - additional functionality. - - :param year: the calendar year. - :param month: the calendar month. - :param day: the calendar day. - :param hour: (optional) the hour. Defaults to 0. - :param minute: (optional) the minute, Defaults to 0. - :param second: (optional) the second, Defaults to 0. - :param microsecond: (optional) the microsecond. Defaults to 0. - :param tzinfo: (optional) A timezone expression. Defaults to UTC. - :param fold: (optional) 0 or 1, used to disambiguate repeated times. Defaults to 0. - - .. _tz-expr: - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO 8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage:: - - >>> import arrow - >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) - - - """ - - resolution = datetime.resolution - - _ATTRS = ["year", "month", "day", "hour", "minute", "second", "microsecond"] - _ATTRS_PLURAL = ["{}s".format(a) for a in _ATTRS] - _MONTHS_PER_QUARTER = 3 - _SECS_PER_MINUTE = float(60) - _SECS_PER_HOUR = float(60 * 60) - _SECS_PER_DAY = float(60 * 60 * 24) - _SECS_PER_WEEK = float(60 * 60 * 24 * 7) - _SECS_PER_MONTH = float(60 * 60 * 24 * 30.5) - _SECS_PER_YEAR = float(60 * 60 * 24 * 365.25) - - def __init__( - self, - year, - month, - day, - hour=0, - minute=0, - second=0, - microsecond=0, - tzinfo=None, - **kwargs - ): - if tzinfo is None: - tzinfo = dateutil_tz.tzutc() - # detect that tzinfo is a pytz object (issue #626) - elif ( - isinstance(tzinfo, dt_tzinfo) - and hasattr(tzinfo, "localize") - and hasattr(tzinfo, "zone") - and tzinfo.zone - ): - tzinfo = parser.TzinfoParser.parse(tzinfo.zone) - elif util.isstr(tzinfo): - tzinfo = parser.TzinfoParser.parse(tzinfo) - - fold = kwargs.get("fold", 0) - - # use enfold here to cover direct arrow.Arrow init on 2.7/3.5 - 
self._datetime = dateutil_tz.enfold( - datetime(year, month, day, hour, minute, second, microsecond, tzinfo), - fold=fold, - ) - - # factories: single object, both original and from datetime. - - @classmethod - def now(cls, tzinfo=None): - """Constructs an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. - - Usage:: - - >>> arrow.now('Asia/Baku') - - - """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzlocal() - - dt = datetime.now(tzinfo) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def utcnow(cls): - """Constructs an :class:`Arrow ` object, representing "now" in UTC - time. - - Usage:: - - >>> arrow.utcnow() - - - """ - - dt = datetime.now(dateutil_tz.tzutc()) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromtimestamp(cls, timestamp, tzinfo=None): - """Constructs an :class:`Arrow ` object from a timestamp, converted to - the given timezone. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
- """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzlocal() - elif util.isstr(tzinfo): - tzinfo = parser.TzinfoParser.parse(tzinfo) - - if not util.is_timestamp(timestamp): - raise ValueError( - "The provided timestamp '{}' is invalid.".format(timestamp) - ) - - timestamp = util.normalize_timestamp(float(timestamp)) - dt = datetime.fromtimestamp(timestamp, tzinfo) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def utcfromtimestamp(cls, timestamp): - """Constructs an :class:`Arrow ` object from a timestamp, in UTC time. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - - """ - - if not util.is_timestamp(timestamp): - raise ValueError( - "The provided timestamp '{}' is invalid.".format(timestamp) - ) - - timestamp = util.normalize_timestamp(float(timestamp)) - dt = datetime.utcfromtimestamp(timestamp) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dateutil_tz.tzutc(), - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromdatetime(cls, dt, tzinfo=None): - """Constructs an :class:`Arrow ` object from a ``datetime`` and - optional replacement timezone. - - :param dt: the ``datetime`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s - timezone, or UTC if naive. 
- - If you only want to replace the timezone of naive datetimes:: - - >>> dt - datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) - >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') - - - """ - - if tzinfo is None: - if dt.tzinfo is None: - tzinfo = dateutil_tz.tzutc() - else: - tzinfo = dt.tzinfo - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromdate(cls, date, tzinfo=None): - """Constructs an :class:`Arrow ` object from a ``date`` and optional - replacement timezone. Time values are set to 0. - - :param date: the ``date`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. - """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzutc() - - return cls(date.year, date.month, date.day, tzinfo=tzinfo) - - @classmethod - def strptime(cls, date_str, fmt, tzinfo=None): - """Constructs an :class:`Arrow ` object from a date string and format, - in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. - - :param date_str: the date string. - :param fmt: the format string. - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed - timezone if ``fmt`` contains a timezone directive, otherwise UTC. - - Usage:: - - >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S') - - - """ - - dt = datetime.strptime(date_str, fmt) - if tzinfo is None: - tzinfo = dt.tzinfo - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - tzinfo, - fold=getattr(dt, "fold", 0), - ) - - # factories: ranges and spans - - @classmethod - def range(cls, frame, start, end=None, tz=None, limit=None): - """Returns an iterator of :class:`Arrow ` objects, representing - points in time between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). 
- :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Usage:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... - - - - - - - **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 13, 30) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... 
- - - - """ - - _, frame_relative, relative_steps = cls._get_frames(frame) - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - - start = cls._get_datetime(start).replace(tzinfo=tzinfo) - end, limit = cls._get_iteration_params(end, limit) - end = cls._get_datetime(end).replace(tzinfo=tzinfo) - - current = cls.fromdatetime(start) - original_day = start.day - day_is_clipped = False - i = 0 - - while current <= end and i < limit: - i += 1 - yield current - - values = [getattr(current, f) for f in cls._ATTRS] - current = cls(*values, tzinfo=tzinfo).shift( - **{frame_relative: relative_steps} - ) - - if frame in ["month", "quarter", "year"] and current.day < original_day: - day_is_clipped = True - - if day_is_clipped and not cls._is_last_day_of_month(current): - current = current.replace(day=original_day) - - def span(self, frame, count=1, bounds="[)"): - """Returns two new :class:`Arrow ` objects, representing the timespan - of the :class:`Arrow ` object in a given timeframe. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param count: (optional) the number of frames to span. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the span. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. 
- - Usage:: - - >>> arrow.utcnow() - - - >>> arrow.utcnow().span('hour') - (, ) - - >>> arrow.utcnow().span('day') - (, ) - - >>> arrow.utcnow().span('day', count=2) - (, ) - - >>> arrow.utcnow().span('day', bounds='[]') - (, ) - - """ - - util.validate_bounds(bounds) - - frame_absolute, frame_relative, relative_steps = self._get_frames(frame) - - if frame_absolute == "week": - attr = "day" - elif frame_absolute == "quarter": - attr = "month" - else: - attr = frame_absolute - - index = self._ATTRS.index(attr) - frames = self._ATTRS[: index + 1] - - values = [getattr(self, f) for f in frames] - - for _ in range(3 - len(values)): - values.append(1) - - floor = self.__class__(*values, tzinfo=self.tzinfo) - - if frame_absolute == "week": - floor = floor.shift(days=-(self.isoweekday() - 1)) - elif frame_absolute == "quarter": - floor = floor.shift(months=-((self.month - 1) % 3)) - - ceil = floor.shift(**{frame_relative: count * relative_steps}) - - if bounds[0] == "(": - floor = floor.shift(microseconds=+1) - - if bounds[1] == ")": - ceil = ceil.shift(microseconds=-1) - - return floor, ceil - - def floor(self, frame): - """Returns a new :class:`Arrow ` object, representing the "floor" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the first element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - - Usage:: - - >>> arrow.utcnow().floor('hour') - - """ - - return self.span(frame)[0] - - def ceil(self, frame): - """Returns a new :class:`Arrow ` object, representing the "ceiling" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the second element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). 
- - Usage:: - - >>> arrow.utcnow().ceil('hour') - - """ - - return self.span(frame)[1] - - @classmethod - def span_range(cls, frame, start, end, tz=None, limit=None, bounds="[)"): - """Returns an iterator of tuples, each :class:`Arrow ` objects, - representing a series of timespans between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in each span in the range. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned - iterator of timespans. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.span_range('hour', start, end): - ... print(r) - ... 
- (, ) - (, ) - (, ) - (, ) - (, ) - (, ) - - """ - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - start = cls.fromdatetime(start, tzinfo).span(frame)[0] - _range = cls.range(frame, start, end, tz, limit) - return (r.span(frame, bounds=bounds) for r in _range) - - @classmethod - def interval(cls, frame, start, end, interval=1, tz=None, bounds="[)"): - """Returns an iterator of tuples, each :class:`Arrow ` objects, - representing a series of intervals between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param interval: (optional) Time interval for the given time frame. - :param tz: (optional) A timezone expression. Defaults to UTC. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the intervals. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - Supported frame values: year, quarter, month, week, day, hour, minute, second - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO 8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.interval('hour', start, end, 2): - ... print r - ... 
- (, ) - (, ) - (, ) - """ - if interval < 1: - raise ValueError("interval has to be a positive integer") - - spanRange = iter(cls.span_range(frame, start, end, tz, bounds=bounds)) - while True: - try: - intvlStart, intvlEnd = next(spanRange) - for _ in range(interval - 1): - _, intvlEnd = next(spanRange) - yield intvlStart, intvlEnd - except StopIteration: - return - - # representations - - def __repr__(self): - return "<{} [{}]>".format(self.__class__.__name__, self.__str__()) - - def __str__(self): - return self._datetime.isoformat() - - def __format__(self, formatstr): - - if len(formatstr) > 0: - return self.format(formatstr) - - return str(self) - - def __hash__(self): - return self._datetime.__hash__() - - # attributes and properties - - def __getattr__(self, name): - - if name == "week": - return self.isocalendar()[1] - - if name == "quarter": - return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1 - - if not name.startswith("_"): - value = getattr(self._datetime, name, None) - - if value is not None: - return value - - return object.__getattribute__(self, name) - - @property - def tzinfo(self): - """Gets the ``tzinfo`` of the :class:`Arrow ` object. - - Usage:: - - >>> arw=arrow.utcnow() - >>> arw.tzinfo - tzutc() - - """ - - return self._datetime.tzinfo - - @tzinfo.setter - def tzinfo(self, tzinfo): - """ Sets the ``tzinfo`` of the :class:`Arrow ` object. """ - - self._datetime = self._datetime.replace(tzinfo=tzinfo) - - @property - def datetime(self): - """Returns a datetime representation of the :class:`Arrow ` object. - - Usage:: - - >>> arw=arrow.utcnow() - >>> arw.datetime - datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc()) - - """ - - return self._datetime - - @property - def naive(self): - """Returns a naive datetime representation of the :class:`Arrow ` - object. 
- - Usage:: - - >>> nairobi = arrow.now('Africa/Nairobi') - >>> nairobi - - >>> nairobi.naive - datetime.datetime(2019, 1, 23, 19, 27, 12, 297999) - - """ - - return self._datetime.replace(tzinfo=None) - - @property - def timestamp(self): - """Returns a timestamp representation of the :class:`Arrow ` object, in - UTC time. - - Usage:: - - >>> arrow.utcnow().timestamp - 1548260567 - - """ - - warnings.warn( - "For compatibility with the datetime.timestamp() method this property will be replaced with a method in " - "the 1.0.0 release, please switch to the .int_timestamp property for identical behaviour as soon as " - "possible.", - DeprecationWarning, - ) - return calendar.timegm(self._datetime.utctimetuple()) - - @property - def int_timestamp(self): - """Returns a timestamp representation of the :class:`Arrow ` object, in - UTC time. - - Usage:: - - >>> arrow.utcnow().int_timestamp - 1548260567 - - """ - - return calendar.timegm(self._datetime.utctimetuple()) - - @property - def float_timestamp(self): - """Returns a floating-point representation of the :class:`Arrow ` - object, in UTC time. - - Usage:: - - >>> arrow.utcnow().float_timestamp - 1548260516.830896 - - """ - - # IDEA get rid of this in 1.0.0 and wrap datetime.timestamp() - # Or for compatibility retain this but make it call the timestamp method - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - return self.timestamp + float(self.microsecond) / 1000000 - - @property - def fold(self): - """ Returns the ``fold`` value of the :class:`Arrow ` object. 
""" - - # in python < 3.6 _datetime will be a _DatetimeWithFold if fold=1 and a datetime with no fold attribute - # otherwise, so we need to return zero to cover the latter case - return getattr(self._datetime, "fold", 0) - - @property - def ambiguous(self): - """ Returns a boolean indicating whether the :class:`Arrow ` object is ambiguous.""" - - return dateutil_tz.datetime_ambiguous(self._datetime) - - @property - def imaginary(self): - """Indicates whether the :class: `Arrow ` object exists in the current timezone.""" - - return not dateutil_tz.datetime_exists(self._datetime) - - # mutation and duplication. - - def clone(self): - """Returns a new :class:`Arrow ` object, cloned from the current one. - - Usage: - - >>> arw = arrow.utcnow() - >>> cloned = arw.clone() - - """ - - return self.fromdatetime(self._datetime) - - def replace(self, **kwargs): - """Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use property names to set their value absolutely:: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.replace(year=2014, month=6) - - - You can also replace the timezone without conversion, using a - :ref:`timezone expression `:: - - >>> arw.replace(tzinfo=tz.tzlocal()) - - - """ - - absolute_kwargs = {} - - for key, value in kwargs.items(): - - if key in self._ATTRS: - absolute_kwargs[key] = value - elif key in ["week", "quarter"]: - raise AttributeError("setting absolute {} is not supported".format(key)) - elif key not in ["tzinfo", "fold"]: - raise AttributeError('unknown attribute: "{}"'.format(key)) - - current = self._datetime.replace(**absolute_kwargs) - - tzinfo = kwargs.get("tzinfo") - - if tzinfo is not None: - tzinfo = self._get_tzinfo(tzinfo) - current = current.replace(tzinfo=tzinfo) - - fold = kwargs.get("fold") - - # TODO revisit this once we drop support for 2.7/3.5 - if fold is not None: - current = dateutil_tz.enfold(current, fold=fold) - - return self.fromdatetime(current) - - def 
shift(self, **kwargs): - """Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use pluralized property names to relatively shift their current value: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.shift(years=1, months=-1) - - - Day-of-the-week relative shifting can use either Python's weekday numbers - (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's - day instances (MO, TU .. SU). When using weekday numbers, the returned - date will always be greater than or equal to the starting date. - - Using the above code (which is a Saturday) and asking it to shift to Saturday: - - >>> arw.shift(weekday=5) - - - While asking for a Monday: - - >>> arw.shift(weekday=0) - - - """ - - relative_kwargs = {} - additional_attrs = ["weeks", "quarters", "weekday"] - - for key, value in kwargs.items(): - - if key in self._ATTRS_PLURAL or key in additional_attrs: - relative_kwargs[key] = value - else: - raise AttributeError( - "Invalid shift time frame. Please select one of the following: {}.".format( - ", ".join(self._ATTRS_PLURAL + additional_attrs) - ) - ) - - # core datetime does not support quarters, translate to months. - relative_kwargs.setdefault("months", 0) - relative_kwargs["months"] += ( - relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER - ) - - current = self._datetime + relativedelta(**relative_kwargs) - - if not dateutil_tz.datetime_exists(current): - current = dateutil_tz.resolve_imaginary(current) - - return self.fromdatetime(current) - - def to(self, tz): - """Returns a new :class:`Arrow ` object, converted - to the target timezone. - - :param tz: A :ref:`timezone expression `. 
- - Usage:: - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc.to('US/Pacific') - - - >>> utc.to(tz.tzlocal()) - - - >>> utc.to('-07:00') - - - >>> utc.to('local') - - - >>> utc.to('local').to('utc') - - - """ - - if not isinstance(tz, dt_tzinfo): - tz = parser.TzinfoParser.parse(tz) - - dt = self._datetime.astimezone(tz) - - return self.__class__( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - # string output and formatting - - def format(self, fmt="YYYY-MM-DD HH:mm:ssZZ", locale="en_us"): - """Returns a string representation of the :class:`Arrow ` object, - formatted according to a format string. - - :param fmt: the format string. - - Usage:: - - >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-09 03:56:47 -00:00' - - >>> arrow.utcnow().format('X') - '1368071882' - - >>> arrow.utcnow().format('MMMM DD, YYYY') - 'May 09, 2013' - - >>> arrow.utcnow().format() - '2013-05-09 03:56:47 -00:00' - - """ - - return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) - - def humanize( - self, other=None, locale="en_us", only_distance=False, granularity="auto" - ): - """Returns a localized, humanized representation of a relative difference in time. - - :param other: (optional) an :class:`Arrow ` or ``datetime`` object. - Defaults to now in the current :class:`Arrow ` object's timezone. - :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. - :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. - :param granularity: (optional) defines the precision of the output. 
Set it to strings 'second', 'minute', - 'hour', 'day', 'week', 'month' or 'year' or a list of any combination of these strings - - Usage:: - - >>> earlier = arrow.utcnow().shift(hours=-2) - >>> earlier.humanize() - '2 hours ago' - - >>> later = earlier.shift(hours=4) - >>> later.humanize(earlier) - 'in 4 hours' - - """ - - locale_name = locale - locale = locales.get_locale(locale) - - if other is None: - utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) - dt = utc.astimezone(self._datetime.tzinfo) - - elif isinstance(other, Arrow): - dt = other._datetime - - elif isinstance(other, datetime): - if other.tzinfo is None: - dt = other.replace(tzinfo=self._datetime.tzinfo) - else: - dt = other.astimezone(self._datetime.tzinfo) - - else: - raise TypeError( - "Invalid 'other' argument of type '{}'. " - "Argument must be of type None, Arrow, or datetime.".format( - type(other).__name__ - ) - ) - - if isinstance(granularity, list) and len(granularity) == 1: - granularity = granularity[0] - - delta = int(round(util.total_seconds(self._datetime - dt))) - sign = -1 if delta < 0 else 1 - diff = abs(delta) - delta = diff - - try: - if granularity == "auto": - if diff < 10: - return locale.describe("now", only_distance=only_distance) - - if diff < 45: - seconds = sign * delta - return locale.describe( - "seconds", seconds, only_distance=only_distance - ) - - elif diff < 90: - return locale.describe("minute", sign, only_distance=only_distance) - elif diff < 2700: - minutes = sign * int(max(delta / 60, 2)) - return locale.describe( - "minutes", minutes, only_distance=only_distance - ) - - elif diff < 5400: - return locale.describe("hour", sign, only_distance=only_distance) - elif diff < 79200: - hours = sign * int(max(delta / 3600, 2)) - return locale.describe("hours", hours, only_distance=only_distance) - - # anything less than 48 hours should be 1 day - elif diff < 172800: - return locale.describe("day", sign, only_distance=only_distance) - elif diff < 554400: - days = 
sign * int(max(delta / 86400, 2)) - return locale.describe("days", days, only_distance=only_distance) - - elif diff < 907200: - return locale.describe("week", sign, only_distance=only_distance) - elif diff < 2419200: - weeks = sign * int(max(delta / 604800, 2)) - return locale.describe("weeks", weeks, only_distance=only_distance) - - elif diff < 3888000: - return locale.describe("month", sign, only_distance=only_distance) - elif diff < 29808000: - self_months = self._datetime.year * 12 + self._datetime.month - other_months = dt.year * 12 + dt.month - - months = sign * int(max(abs(other_months - self_months), 2)) - - return locale.describe( - "months", months, only_distance=only_distance - ) - - elif diff < 47260800: - return locale.describe("year", sign, only_distance=only_distance) - else: - years = sign * int(max(delta / 31536000, 2)) - return locale.describe("years", years, only_distance=only_distance) - - elif util.isstr(granularity): - if granularity == "second": - delta = sign * delta - if abs(delta) < 2: - return locale.describe("now", only_distance=only_distance) - elif granularity == "minute": - delta = sign * delta / self._SECS_PER_MINUTE - elif granularity == "hour": - delta = sign * delta / self._SECS_PER_HOUR - elif granularity == "day": - delta = sign * delta / self._SECS_PER_DAY - elif granularity == "week": - delta = sign * delta / self._SECS_PER_WEEK - elif granularity == "month": - delta = sign * delta / self._SECS_PER_MONTH - elif granularity == "year": - delta = sign * delta / self._SECS_PER_YEAR - else: - raise AttributeError( - "Invalid level of granularity. 
Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'" - ) - - if trunc(abs(delta)) != 1: - granularity += "s" - return locale.describe(granularity, delta, only_distance=only_distance) - - else: - timeframes = [] - if "year" in granularity: - years = sign * delta / self._SECS_PER_YEAR - delta %= self._SECS_PER_YEAR - timeframes.append(["year", years]) - - if "month" in granularity: - months = sign * delta / self._SECS_PER_MONTH - delta %= self._SECS_PER_MONTH - timeframes.append(["month", months]) - - if "week" in granularity: - weeks = sign * delta / self._SECS_PER_WEEK - delta %= self._SECS_PER_WEEK - timeframes.append(["week", weeks]) - - if "day" in granularity: - days = sign * delta / self._SECS_PER_DAY - delta %= self._SECS_PER_DAY - timeframes.append(["day", days]) - - if "hour" in granularity: - hours = sign * delta / self._SECS_PER_HOUR - delta %= self._SECS_PER_HOUR - timeframes.append(["hour", hours]) - - if "minute" in granularity: - minutes = sign * delta / self._SECS_PER_MINUTE - delta %= self._SECS_PER_MINUTE - timeframes.append(["minute", minutes]) - - if "second" in granularity: - seconds = sign * delta - timeframes.append(["second", seconds]) - - if len(timeframes) < len(granularity): - raise AttributeError( - "Invalid level of granularity. " - "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'." - ) - - for tf in timeframes: - # Make granularity plural if the delta is not equal to 1 - if trunc(abs(tf[1])) != 1: - tf[0] += "s" - return locale.describe_multi(timeframes, only_distance=only_distance) - - except KeyError as e: - raise ValueError( - "Humanization of the {} granularity is not currently translated in the '{}' locale. 
" - "Please consider making a contribution to this locale.".format( - e, locale_name - ) - ) - - # query functions - - def is_between(self, start, end, bounds="()"): - """Returns a boolean denoting whether the specified date and time is between - the start and end dates and times. - - :param start: an :class:`Arrow ` object. - :param end: an :class:`Arrow ` object. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the range. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '()' is used. - - Usage:: - - >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10)) - >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36)) - >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end) - True - - >>> start = arrow.get(datetime(2013, 5, 5)) - >>> end = arrow.get(datetime(2013, 5, 8)) - >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]') - True - - >>> start = arrow.get(datetime(2013, 5, 5)) - >>> end = arrow.get(datetime(2013, 5, 8)) - >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)') - False - - """ - - util.validate_bounds(bounds) - - if not isinstance(start, Arrow): - raise TypeError( - "Can't parse start date argument type of '{}'".format(type(start)) - ) - - if not isinstance(end, Arrow): - raise TypeError( - "Can't parse end date argument type of '{}'".format(type(end)) - ) - - include_start = bounds[0] == "[" - include_end = bounds[1] == "]" - - target_timestamp = self.float_timestamp - start_timestamp = start.float_timestamp - end_timestamp = end.float_timestamp - - if include_start and include_end: - return ( - target_timestamp >= start_timestamp - and target_timestamp <= end_timestamp - ) - elif include_start and not include_end: - return ( - target_timestamp >= start_timestamp and target_timestamp < end_timestamp - ) - elif not 
include_start and include_end: - return ( - target_timestamp > start_timestamp and target_timestamp <= end_timestamp - ) - else: - return ( - target_timestamp > start_timestamp and target_timestamp < end_timestamp - ) - - # datetime methods - - def date(self): - """Returns a ``date`` object with the same year, month and day. - - Usage:: - - >>> arrow.utcnow().date() - datetime.date(2019, 1, 23) - - """ - - return self._datetime.date() - - def time(self): - """Returns a ``time`` object with the same hour, minute, second, microsecond. - - Usage:: - - >>> arrow.utcnow().time() - datetime.time(12, 15, 34, 68352) - - """ - - return self._datetime.time() - - def timetz(self): - """Returns a ``time`` object with the same hour, minute, second, microsecond and - tzinfo. - - Usage:: - - >>> arrow.utcnow().timetz() - datetime.time(12, 5, 18, 298893, tzinfo=tzutc()) - - """ - - return self._datetime.timetz() - - def astimezone(self, tz): - """Returns a ``datetime`` object, converted to the specified timezone. - - :param tz: a ``tzinfo`` object. - - Usage:: - - >>> pacific=arrow.now('US/Pacific') - >>> nyc=arrow.now('America/New_York').tzinfo - >>> pacific.astimezone(nyc) - datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York')) - - """ - - return self._datetime.astimezone(tz) - - def utcoffset(self): - """Returns a ``timedelta`` object representing the whole number of minutes difference from - UTC time. - - Usage:: - - >>> arrow.now('US/Pacific').utcoffset() - datetime.timedelta(-1, 57600) - - """ - - return self._datetime.utcoffset() - - def dst(self): - """Returns the daylight savings time adjustment. - - Usage:: - - >>> arrow.utcnow().dst() - datetime.timedelta(0) - - """ - - return self._datetime.dst() - - def timetuple(self): - """Returns a ``time.struct_time``, in the current timezone. 
- - Usage:: - - >>> arrow.utcnow().timetuple() - time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0) - - """ - - return self._datetime.timetuple() - - def utctimetuple(self): - """Returns a ``time.struct_time``, in UTC time. - - Usage:: - - >>> arrow.utcnow().utctimetuple() - time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0) - - """ - - return self._datetime.utctimetuple() - - def toordinal(self): - """Returns the proleptic Gregorian ordinal of the date. - - Usage:: - - >>> arrow.utcnow().toordinal() - 737078 - - """ - - return self._datetime.toordinal() - - def weekday(self): - """Returns the day of the week as an integer (0-6). - - Usage:: - - >>> arrow.utcnow().weekday() - 5 - - """ - - return self._datetime.weekday() - - def isoweekday(self): - """Returns the ISO day of the week as an integer (1-7). - - Usage:: - - >>> arrow.utcnow().isoweekday() - 6 - - """ - - return self._datetime.isoweekday() - - def isocalendar(self): - """Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). - - Usage:: - - >>> arrow.utcnow().isocalendar() - (2019, 3, 6) - - """ - - return self._datetime.isocalendar() - - def isoformat(self, sep="T"): - """Returns an ISO 8601 formatted representation of the date and time. - - Usage:: - - >>> arrow.utcnow().isoformat() - '2019-01-19T18:30:52.442118+00:00' - - """ - - return self._datetime.isoformat(sep) - - def ctime(self): - """Returns a ctime formatted representation of the date and time. - - Usage:: - - >>> arrow.utcnow().ctime() - 'Sat Jan 19 18:26:50 2019' - - """ - - return self._datetime.ctime() - - def strftime(self, format): - """Formats in the style of ``datetime.strftime``. - - :param format: the format string. 
- - Usage:: - - >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S') - '23-01-2019 12:28:17' - - """ - - return self._datetime.strftime(format) - - def for_json(self): - """Serializes for the ``for_json`` protocol of simplejson. - - Usage:: - - >>> arrow.utcnow().for_json() - '2019-01-19T18:25:36.760079+00:00' - - """ - - return self.isoformat() - - # math - - def __add__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) - - return NotImplemented - - def __radd__(self, other): - return self.__add__(other) - - def __sub__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) - - elif isinstance(other, datetime): - return self._datetime - other - - elif isinstance(other, Arrow): - return self._datetime - other._datetime - - return NotImplemented - - def __rsub__(self, other): - - if isinstance(other, datetime): - return other - self._datetime - - return NotImplemented - - # comparisons - - def __eq__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return False - - return self._datetime == self._get_datetime(other) - - def __ne__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return True - - return not self.__eq__(other) - - def __gt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime > self._get_datetime(other) - - def __ge__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime >= self._get_datetime(other) - - def __lt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime < self._get_datetime(other) - - def __le__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime <= self._get_datetime(other) - - def __cmp__(self, 
other): - if sys.version_info[0] < 3: # pragma: no cover - if not isinstance(other, (Arrow, datetime)): - raise TypeError( - "can't compare '{}' to '{}'".format(type(self), type(other)) - ) - - # internal methods - - @staticmethod - def _get_tzinfo(tz_expr): - - if tz_expr is None: - return dateutil_tz.tzutc() - if isinstance(tz_expr, dt_tzinfo): - return tz_expr - else: - try: - return parser.TzinfoParser.parse(tz_expr) - except parser.ParserError: - raise ValueError("'{}' not recognized as a timezone".format(tz_expr)) - - @classmethod - def _get_datetime(cls, expr): - """Get datetime object for a specified expression.""" - if isinstance(expr, Arrow): - return expr.datetime - elif isinstance(expr, datetime): - return expr - elif util.is_timestamp(expr): - timestamp = float(expr) - return cls.utcfromtimestamp(timestamp).datetime - else: - raise ValueError( - "'{}' not recognized as a datetime or timestamp.".format(expr) - ) - - @classmethod - def _get_frames(cls, name): - - if name in cls._ATTRS: - return name, "{}s".format(name), 1 - elif name[-1] == "s" and name[:-1] in cls._ATTRS: - return name[:-1], name, 1 - elif name in ["week", "weeks"]: - return "week", "weeks", 1 - elif name in ["quarter", "quarters"]: - return "quarter", "months", 3 - - supported = ", ".join( - [ - "year(s)", - "month(s)", - "day(s)", - "hour(s)", - "minute(s)", - "second(s)", - "microsecond(s)", - "week(s)", - "quarter(s)", - ] - ) - raise AttributeError( - "range/span over frame {} not supported. 
Supported frames: {}".format( - name, supported - ) - ) - - @classmethod - def _get_iteration_params(cls, end, limit): - - if end is None: - - if limit is None: - raise ValueError("one of 'end' or 'limit' is required") - - return cls.max, limit - - else: - if limit is None: - return end, sys.maxsize - return end, limit - - @staticmethod - def _is_last_day_of_month(date): - return date.day == calendar.monthrange(date.year, date.month)[1] - - -Arrow.min = Arrow.fromdatetime(datetime.min) -Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py deleted file mode 100644 index 81e37b26de6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- - -# Output of time.mktime(datetime.max.timetuple()) on macOS -# This value must be hardcoded for compatibility with Windows -# Platform-independent max timestamps are hard to form -# https://stackoverflow.com/q/46133223 -MAX_TIMESTAMP = 253402318799.0 -MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000 -MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py deleted file mode 100644 index 05933e81518..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py +++ /dev/null @@ -1,301 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Implements the :class:`ArrowFactory ` class, -providing factory methods for common :class:`Arrow ` -construction scenarios. 
- -""" - -from __future__ import absolute_import - -import calendar -from datetime import date, datetime -from datetime import tzinfo as dt_tzinfo -from time import struct_time - -from dateutil import tz as dateutil_tz - -from arrow import parser -from arrow.arrow import Arrow -from arrow.util import is_timestamp, iso_to_gregorian, isstr - - -class ArrowFactory(object): - """A factory for generating :class:`Arrow ` objects. - - :param type: (optional) the :class:`Arrow `-based class to construct from. - Defaults to :class:`Arrow `. - - """ - - def __init__(self, type=Arrow): - self.type = type - - def get(self, *args, **kwargs): - """Returns an :class:`Arrow ` object based on flexible inputs. - - :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'. - :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. - Replaces the timezone unless using an input form that is explicitly UTC or specifies - the timezone in a positional argument. Defaults to UTC. - :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize - redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing. - Defaults to false. - - Usage:: - - >>> import arrow - - **No inputs** to get current UTC time:: - - >>> arrow.get() - - - **None** to also get current UTC time:: - - >>> arrow.get(None) - - - **One** :class:`Arrow ` object, to get a copy. 
- - >>> arw = arrow.utcnow() - >>> arrow.get(arw) - - - **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get - that timestamp in UTC:: - - >>> arrow.get(1367992474.293378) - - - >>> arrow.get(1367992474) - - - **One** ISO 8601-formatted ``str``, to parse it:: - - >>> arrow.get('2013-09-29T01:26:43.830580') - - - **One** ISO 8601-formatted ``str``, in basic format, to parse it:: - - >>> arrow.get('20160413T133656.456289') - - - **One** ``tzinfo``, to get the current time **converted** to that timezone:: - - >>> arrow.get(tz.tzlocal()) - - - **One** naive ``datetime``, to get that datetime in UTC:: - - >>> arrow.get(datetime(2013, 5, 5)) - - - **One** aware ``datetime``, to get that datetime:: - - >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) - - - **One** naive ``date``, to get that date in UTC:: - - >>> arrow.get(date(2013, 5, 5)) - - - **One** time.struct time:: - - >>> arrow.get(gmtime(0)) - - - **One** iso calendar ``tuple``, to get that week date in UTC:: - - >>> arrow.get((2013, 18, 7)) - - - **Two** arguments, a naive or aware ``datetime``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, a naive ``date``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(date(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, both ``str``, to parse the first according to the format of the second:: - - >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ') - - - **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: - - >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) - - - **Three or more** arguments, as for the constructor of a ``datetime``:: - - >>> arrow.get(2013, 5, 5, 12, 30, 45) - - - """ - - arg_count = len(args) - locale = kwargs.pop("locale", "en_us") - tz = kwargs.get("tzinfo", None) - normalize_whitespace = kwargs.pop("normalize_whitespace", False) 
- - # if kwargs given, send to constructor unless only tzinfo provided - if len(kwargs) > 1: - arg_count = 3 - - # tzinfo kwarg is not provided - if len(kwargs) == 1 and tz is None: - arg_count = 3 - - # () -> now, @ utc. - if arg_count == 0: - if isstr(tz): - tz = parser.TzinfoParser.parse(tz) - return self.type.now(tz) - - if isinstance(tz, dt_tzinfo): - return self.type.now(tz) - - return self.type.utcnow() - - if arg_count == 1: - arg = args[0] - - # (None) -> now, @ utc. - if arg is None: - return self.type.utcnow() - - # try (int, float) -> from timestamp with tz - elif not isstr(arg) and is_timestamp(arg): - if tz is None: - # set to UTC by default - tz = dateutil_tz.tzutc() - return self.type.fromtimestamp(arg, tzinfo=tz) - - # (Arrow) -> from the object's datetime. - elif isinstance(arg, Arrow): - return self.type.fromdatetime(arg.datetime) - - # (datetime) -> from datetime. - elif isinstance(arg, datetime): - return self.type.fromdatetime(arg) - - # (date) -> from date. - elif isinstance(arg, date): - return self.type.fromdate(arg) - - # (tzinfo) -> now, @ tzinfo. - elif isinstance(arg, dt_tzinfo): - return self.type.now(arg) - - # (str) -> parse. - elif isstr(arg): - dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace) - return self.type.fromdatetime(dt, tz) - - # (struct_time) -> from struct_time - elif isinstance(arg, struct_time): - return self.type.utcfromtimestamp(calendar.timegm(arg)) - - # (iso calendar) -> convert then from date - elif isinstance(arg, tuple) and len(arg) == 3: - dt = iso_to_gregorian(*arg) - return self.type.fromdate(dt) - - else: - raise TypeError( - "Can't parse single argument of type '{}'".format(type(arg)) - ) - - elif arg_count == 2: - - arg_1, arg_2 = args[0], args[1] - - if isinstance(arg_1, datetime): - - # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. 
- if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): - return self.type.fromdatetime(arg_1, arg_2) - else: - raise TypeError( - "Can't parse two arguments of types 'datetime', '{}'".format( - type(arg_2) - ) - ) - - elif isinstance(arg_1, date): - - # (date, tzinfo/str) -> fromdate replace tzinfo. - if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): - return self.type.fromdate(arg_1, tzinfo=arg_2) - else: - raise TypeError( - "Can't parse two arguments of types 'date', '{}'".format( - type(arg_2) - ) - ) - - # (str, format) -> parse. - elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): - dt = parser.DateTimeParser(locale).parse( - args[0], args[1], normalize_whitespace - ) - return self.type.fromdatetime(dt, tzinfo=tz) - - else: - raise TypeError( - "Can't parse two arguments of types '{}' and '{}'".format( - type(arg_1), type(arg_2) - ) - ) - - # 3+ args -> datetime-like via constructor. - else: - return self.type(*args, **kwargs) - - def utcnow(self): - """Returns an :class:`Arrow ` object, representing "now" in UTC time. - - Usage:: - - >>> import arrow - >>> arrow.utcnow() - - """ - - return self.type.utcnow() - - def now(self, tz=None): - """Returns an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
- - Usage:: - - >>> import arrow - >>> arrow.now() - - - >>> arrow.now('US/Pacific') - - - >>> arrow.now('+02:00') - - - >>> arrow.now('local') - - """ - - if tz is None: - tz = dateutil_tz.tzlocal() - elif not isinstance(tz, dt_tzinfo): - tz = parser.TzinfoParser.parse(tz) - - return self.type.now(tz) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py deleted file mode 100644 index 9f9d7a44da7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, division - -import calendar -import re - -from dateutil import tz as dateutil_tz - -from arrow import locales, util - -FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ" -FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ" -FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z" -FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ" -FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z" -FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ" -FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ" - - -class DateTimeFormatter(object): - - # This pattern matches characters enclosed in square brackets are matched as - # an atomic group. 
For more info on atomic groups and how to they are - # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578 - - _FORMAT_RE = re.compile( - r"(\[(?:(?=(?P[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)" - ) - - def __init__(self, locale="en_us"): - - self.locale = locales.get_locale(locale) - - def format(cls, dt, fmt): - - return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) - - def _format_token(self, dt, token): - - if token and token.startswith("[") and token.endswith("]"): - return token[1:-1] - - if token == "YYYY": - return self.locale.year_full(dt.year) - if token == "YY": - return self.locale.year_abbreviation(dt.year) - - if token == "MMMM": - return self.locale.month_name(dt.month) - if token == "MMM": - return self.locale.month_abbreviation(dt.month) - if token == "MM": - return "{:02d}".format(dt.month) - if token == "M": - return str(dt.month) - - if token == "DDDD": - return "{:03d}".format(dt.timetuple().tm_yday) - if token == "DDD": - return str(dt.timetuple().tm_yday) - if token == "DD": - return "{:02d}".format(dt.day) - if token == "D": - return str(dt.day) - - if token == "Do": - return self.locale.ordinal_number(dt.day) - - if token == "dddd": - return self.locale.day_name(dt.isoweekday()) - if token == "ddd": - return self.locale.day_abbreviation(dt.isoweekday()) - if token == "d": - return str(dt.isoweekday()) - - if token == "HH": - return "{:02d}".format(dt.hour) - if token == "H": - return str(dt.hour) - if token == "hh": - return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - if token == "h": - return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - - if token == "mm": - return "{:02d}".format(dt.minute) - if token == "m": - return str(dt.minute) - - if token == "ss": - return "{:02d}".format(dt.second) - if token == "s": - return str(dt.second) - - if token == "SSSSSS": - return 
str("{:06d}".format(int(dt.microsecond))) - if token == "SSSSS": - return str("{:05d}".format(int(dt.microsecond / 10))) - if token == "SSSS": - return str("{:04d}".format(int(dt.microsecond / 100))) - if token == "SSS": - return str("{:03d}".format(int(dt.microsecond / 1000))) - if token == "SS": - return str("{:02d}".format(int(dt.microsecond / 10000))) - if token == "S": - return str(int(dt.microsecond / 100000)) - - if token == "X": - # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 - return str(calendar.timegm(dt.utctimetuple())) - - if token == "x": - # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 - ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000) - return str(int(ts * 1000000)) - - if token == "ZZZ": - return dt.tzname() - - if token in ["ZZ", "Z"]: - separator = ":" if token == "ZZ" else "" - tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo - total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) - - sign = "+" if total_minutes >= 0 else "-" - total_minutes = abs(total_minutes) - hour, minute = divmod(total_minutes, 60) - - return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute) - - if token in ("a", "A"): - return self.locale.meridian(dt.hour, token) - - if token == "W": - year, week, day = dt.isocalendar() - return "{}-W{:02d}-{}".format(year, week, day) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py deleted file mode 100644 index 6833da5a781..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py +++ /dev/null @@ -1,4267 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import inspect -import sys -from math import trunc - - -def get_locale(name): - """Returns an appropriate :class:`Locale ` - corresponding to an inpute locale name. 
- - :param name: the name of the locale. - - """ - - locale_cls = _locales.get(name.lower()) - - if locale_cls is None: - raise ValueError("Unsupported locale '{}'".format(name)) - - return locale_cls() - - -def get_locale_by_class_name(name): - """Returns an appropriate :class:`Locale ` - corresponding to an locale class name. - - :param name: the name of the locale class. - - """ - locale_cls = globals().get(name) - - if locale_cls is None: - raise ValueError("Unsupported locale '{}'".format(name)) - - return locale_cls() - - -# base locale type. - - -class Locale(object): - """ Represents locale-specific data and functionality. """ - - names = [] - - timeframes = { - "now": "", - "second": "", - "seconds": "", - "minute": "", - "minutes": "", - "hour": "", - "hours": "", - "day": "", - "days": "", - "week": "", - "weeks": "", - "month": "", - "months": "", - "year": "", - "years": "", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - past = None - future = None - and_word = None - - month_names = [] - month_abbreviations = [] - - day_names = [] - day_abbreviations = [] - - ordinal_day_re = r"(\d+)" - - def __init__(self): - - self._month_name_to_ordinal = None - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. - :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - humanized = self._format_timeframe(timeframe, delta) - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - def describe_multi(self, timeframes, only_distance=False): - """Describes a delta within multiple timeframes in plain language. - - :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
- :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords - """ - - humanized = "" - for index, (timeframe, delta) in enumerate(timeframes): - humanized += self._format_timeframe(timeframe, delta) - if index == len(timeframes) - 2 and self.and_word: - humanized += " " + self.and_word + " " - elif index < len(timeframes) - 1: - humanized += " " - - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - def day_name(self, day): - """Returns the day name for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - """ - - return self.day_names[day] - - def day_abbreviation(self, day): - """Returns the day abbreviation for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - """ - - return self.day_abbreviations[day] - - def month_name(self, month): - """Returns the month name for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - """ - - return self.month_names[month] - - def month_abbreviation(self, month): - """Returns the month abbreviation for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - """ - - return self.month_abbreviations[month] - - def month_number(self, name): - """Returns the month number for a month specified by name or abbreviation. - - :param name: the month name or abbreviation. 
- - """ - - if self._month_name_to_ordinal is None: - self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) - self._month_name_to_ordinal.update( - self._name_to_ordinal(self.month_abbreviations) - ) - - return self._month_name_to_ordinal.get(name) - - def year_full(self, year): - """Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - """ - return "{:04d}".format(year) - - def year_abbreviation(self, year): - """Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - """ - return "{:04d}".format(year)[2:] - - def meridian(self, hour, token): - """Returns the meridian indicator for a specified hour and format token. - - :param hour: the ``int`` hour of the day. - :param token: the format token. - """ - - if token == "a": - return self.meridians["am"] if hour < 12 else self.meridians["pm"] - if token == "A": - return self.meridians["AM"] if hour < 12 else self.meridians["PM"] - - def ordinal_number(self, n): - """Returns the ordinal format of a given integer - - :param n: an integer - """ - return self._ordinal_number(n) - - def _ordinal_number(self, n): - return "{}".format(n) - - def _name_to_ordinal(self, lst): - return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) - - def _format_timeframe(self, timeframe, delta): - return self.timeframes[timeframe].format(trunc(abs(delta))) - - def _format_relative(self, humanized, timeframe, delta): - - if timeframe == "now": - return humanized - - direction = self.past if delta < 0 else self.future - - return direction.format(humanized) - - -# base locale type implementations. 
- - -class EnglishLocale(Locale): - - names = [ - "en", - "en_us", - "en_gb", - "en_au", - "en_be", - "en_jp", - "en_za", - "en_ca", - "en_ph", - ] - - past = "{0} ago" - future = "in {0}" - and_word = "and" - - timeframes = { - "now": "just now", - "second": "a second", - "seconds": "{0} seconds", - "minute": "a minute", - "minutes": "{0} minutes", - "hour": "an hour", - "hours": "{0} hours", - "day": "a day", - "days": "{0} days", - "week": "a week", - "weeks": "{0} weeks", - "month": "a month", - "months": "{0} months", - "year": "a year", - "years": "{0} years", - } - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - month_names = [ - "", - "January", - "February", - "March", - "April", - "May", - "June", - "July", - "August", - "September", - "October", - "November", - "December", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ] - - day_names = [ - "", - "Monday", - "Tuesday", - "Wednesday", - "Thursday", - "Friday", - "Saturday", - "Sunday", - ] - day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] - - ordinal_day_re = r"((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))" - - def _ordinal_number(self, n): - if n % 100 not in (11, 12, 13): - remainder = abs(n) % 10 - if remainder == 1: - return "{}st".format(n) - elif remainder == 2: - return "{}nd".format(n) - elif remainder == 3: - return "{}rd".format(n) - return "{}th".format(n) - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. 
- :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - humanized = super(EnglishLocale, self).describe(timeframe, delta, only_distance) - if only_distance and timeframe == "now": - humanized = "instantly" - - return humanized - - -class ItalianLocale(Locale): - names = ["it", "it_it"] - past = "{0} fa" - future = "tra {0}" - and_word = "e" - - timeframes = { - "now": "adesso", - "second": "un secondo", - "seconds": "{0} qualche secondo", - "minute": "un minuto", - "minutes": "{0} minuti", - "hour": "un'ora", - "hours": "{0} ore", - "day": "un giorno", - "days": "{0} giorni", - "week": "una settimana,", - "weeks": "{0} settimane", - "month": "un mese", - "months": "{0} mesi", - "year": "un anno", - "years": "{0} anni", - } - - month_names = [ - "", - "gennaio", - "febbraio", - "marzo", - "aprile", - "maggio", - "giugno", - "luglio", - "agosto", - "settembre", - "ottobre", - "novembre", - "dicembre", - ] - month_abbreviations = [ - "", - "gen", - "feb", - "mar", - "apr", - "mag", - "giu", - "lug", - "ago", - "set", - "ott", - "nov", - "dic", - ] - - day_names = [ - "", - "lunedì", - "martedì", - "mercoledì", - "giovedì", - "venerdì", - "sabato", - "domenica", - ] - day_abbreviations = ["", "lun", "mar", "mer", "gio", "ven", "sab", "dom"] - - ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" - - def _ordinal_number(self, n): - return "{}º".format(n) - - -class SpanishLocale(Locale): - names = ["es", "es_es"] - past = "hace {0}" - future = "en {0}" - and_word = "y" - - timeframes = { - "now": "ahora", - "second": "un segundo", - "seconds": "{0} segundos", - "minute": "un minuto", - "minutes": "{0} minutos", - "hour": "una hora", - "hours": "{0} horas", - "day": "un día", - "days": "{0} días", - "week": "una semana", - "weeks": "{0} semanas", - "month": "un mes", - "months": "{0} meses", - "year": "un año", - "years": "{0} años", - } - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - month_names = [ - "", 
- "enero", - "febrero", - "marzo", - "abril", - "mayo", - "junio", - "julio", - "agosto", - "septiembre", - "octubre", - "noviembre", - "diciembre", - ] - month_abbreviations = [ - "", - "ene", - "feb", - "mar", - "abr", - "may", - "jun", - "jul", - "ago", - "sep", - "oct", - "nov", - "dic", - ] - - day_names = [ - "", - "lunes", - "martes", - "miércoles", - "jueves", - "viernes", - "sábado", - "domingo", - ] - day_abbreviations = ["", "lun", "mar", "mie", "jue", "vie", "sab", "dom"] - - ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" - - def _ordinal_number(self, n): - return "{}º".format(n) - - -class FrenchBaseLocale(Locale): - - past = "il y a {0}" - future = "dans {0}" - and_word = "et" - - timeframes = { - "now": "maintenant", - "second": "une seconde", - "seconds": "{0} quelques secondes", - "minute": "une minute", - "minutes": "{0} minutes", - "hour": "une heure", - "hours": "{0} heures", - "day": "un jour", - "days": "{0} jours", - "week": "une semaine", - "weeks": "{0} semaines", - "month": "un mois", - "months": "{0} mois", - "year": "un an", - "years": "{0} ans", - } - - month_names = [ - "", - "janvier", - "février", - "mars", - "avril", - "mai", - "juin", - "juillet", - "août", - "septembre", - "octobre", - "novembre", - "décembre", - ] - - day_names = [ - "", - "lundi", - "mardi", - "mercredi", - "jeudi", - "vendredi", - "samedi", - "dimanche", - ] - day_abbreviations = ["", "lun", "mar", "mer", "jeu", "ven", "sam", "dim"] - - ordinal_day_re = ( - r"((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)" - ) - - def _ordinal_number(self, n): - if abs(n) == 1: - return "{}er".format(n) - return "{}e".format(n) - - -class FrenchLocale(FrenchBaseLocale, Locale): - - names = ["fr", "fr_fr"] - - month_abbreviations = [ - "", - "janv", - "févr", - "mars", - "avr", - "mai", - "juin", - "juil", - "août", - "sept", - "oct", - "nov", - "déc", - ] - - -class FrenchCanadianLocale(FrenchBaseLocale, Locale): - - names = ["fr_ca"] - - month_abbreviations = [ 
- "", - "janv", - "févr", - "mars", - "avr", - "mai", - "juin", - "juill", - "août", - "sept", - "oct", - "nov", - "déc", - ] - - -class GreekLocale(Locale): - - names = ["el", "el_gr"] - - past = "{0} πριν" - future = "σε {0}" - and_word = "και" - - timeframes = { - "now": "τώρα", - "second": "ένα δεύτερο", - "seconds": "{0} δευτερόλεπτα", - "minute": "ένα λεπτό", - "minutes": "{0} λεπτά", - "hour": "μία ώρα", - "hours": "{0} ώρες", - "day": "μία μέρα", - "days": "{0} μέρες", - "month": "ένα μήνα", - "months": "{0} μήνες", - "year": "ένα χρόνο", - "years": "{0} χρόνια", - } - - month_names = [ - "", - "Ιανουαρίου", - "Φεβρουαρίου", - "Μαρτίου", - "Απριλίου", - "Μαΐου", - "Ιουνίου", - "Ιουλίου", - "Αυγούστου", - "Σεπτεμβρίου", - "Οκτωβρίου", - "Νοεμβρίου", - "Δεκεμβρίου", - ] - month_abbreviations = [ - "", - "Ιαν", - "Φεβ", - "Μαρ", - "Απρ", - "Μαϊ", - "Ιον", - "Ιολ", - "Αυγ", - "Σεπ", - "Οκτ", - "Νοε", - "Δεκ", - ] - - day_names = [ - "", - "Δευτέρα", - "Τρίτη", - "Τετάρτη", - "Πέμπτη", - "Παρασκευή", - "Σάββατο", - "Κυριακή", - ] - day_abbreviations = ["", "Δευ", "Τρι", "Τετ", "Πεμ", "Παρ", "Σαβ", "Κυρ"] - - -class JapaneseLocale(Locale): - - names = ["ja", "ja_jp"] - - past = "{0}前" - future = "{0}後" - - timeframes = { - "now": "現在", - "second": "二番目の", - "seconds": "{0}数秒", - "minute": "1分", - "minutes": "{0}分", - "hour": "1時間", - "hours": "{0}時間", - "day": "1日", - "days": "{0}日", - "week": "1週間", - "weeks": "{0}週間", - "month": "1ヶ月", - "months": "{0}ヶ月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "月曜日", "火曜日", "水曜日", "木曜日", "金曜日", "土曜日", "日曜日"] - day_abbreviations = ["", "月", "火", "水", "木", "金", "土", "日"] - - -class SwedishLocale(Locale): - - names = ["sv", "sv_se"] - - past = "för {0} 
sen" - future = "om {0}" - and_word = "och" - - timeframes = { - "now": "just nu", - "second": "en sekund", - "seconds": "{0} några sekunder", - "minute": "en minut", - "minutes": "{0} minuter", - "hour": "en timme", - "hours": "{0} timmar", - "day": "en dag", - "days": "{0} dagar", - "week": "en vecka", - "weeks": "{0} veckor", - "month": "en månad", - "months": "{0} månader", - "year": "ett år", - "years": "{0} år", - } - - month_names = [ - "", - "januari", - "februari", - "mars", - "april", - "maj", - "juni", - "juli", - "augusti", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "måndag", - "tisdag", - "onsdag", - "torsdag", - "fredag", - "lördag", - "söndag", - ] - day_abbreviations = ["", "mån", "tis", "ons", "tor", "fre", "lör", "sön"] - - -class FinnishLocale(Locale): - - names = ["fi", "fi_fi"] - - # The finnish grammar is very complex, and its hard to convert - # 1-to-1 to something like English. 
- - past = "{0} sitten" - future = "{0} kuluttua" - - timeframes = { - "now": ["juuri nyt", "juuri nyt"], - "second": ["sekunti", "sekunti"], - "seconds": ["{0} muutama sekunti", "{0} muutaman sekunnin"], - "minute": ["minuutti", "minuutin"], - "minutes": ["{0} minuuttia", "{0} minuutin"], - "hour": ["tunti", "tunnin"], - "hours": ["{0} tuntia", "{0} tunnin"], - "day": ["päivä", "päivä"], - "days": ["{0} päivää", "{0} päivän"], - "month": ["kuukausi", "kuukauden"], - "months": ["{0} kuukautta", "{0} kuukauden"], - "year": ["vuosi", "vuoden"], - "years": ["{0} vuotta", "{0} vuoden"], - } - - # Months and days are lowercase in Finnish - month_names = [ - "", - "tammikuu", - "helmikuu", - "maaliskuu", - "huhtikuu", - "toukokuu", - "kesäkuu", - "heinäkuu", - "elokuu", - "syyskuu", - "lokakuu", - "marraskuu", - "joulukuu", - ] - - month_abbreviations = [ - "", - "tammi", - "helmi", - "maalis", - "huhti", - "touko", - "kesä", - "heinä", - "elo", - "syys", - "loka", - "marras", - "joulu", - ] - - day_names = [ - "", - "maanantai", - "tiistai", - "keskiviikko", - "torstai", - "perjantai", - "lauantai", - "sunnuntai", - ] - - day_abbreviations = ["", "ma", "ti", "ke", "to", "pe", "la", "su"] - - def _format_timeframe(self, timeframe, delta): - return ( - self.timeframes[timeframe][0].format(abs(delta)), - self.timeframes[timeframe][1].format(abs(delta)), - ) - - def _format_relative(self, humanized, timeframe, delta): - if timeframe == "now": - return humanized[0] - - direction = self.past if delta < 0 else self.future - which = 0 if delta < 0 else 1 - - return direction.format(humanized[which]) - - def _ordinal_number(self, n): - return "{}.".format(n) - - -class ChineseCNLocale(Locale): - - names = ["zh", "zh_cn"] - - past = "{0}前" - future = "{0}后" - - timeframes = { - "now": "刚才", - "second": "一秒", - "seconds": "{0}秒", - "minute": "1分钟", - "minutes": "{0}分钟", - "hour": "1小时", - "hours": "{0}小时", - "day": "1天", - "days": "{0}天", - "week": "一周", - "weeks": "{0}周", - 
"month": "1个月", - "months": "{0}个月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "一月", - "二月", - "三月", - "四月", - "五月", - "六月", - "七月", - "八月", - "九月", - "十月", - "十一月", - "十二月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] - day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class ChineseTWLocale(Locale): - - names = ["zh_tw"] - - past = "{0}前" - future = "{0}後" - and_word = "和" - - timeframes = { - "now": "剛才", - "second": "1秒", - "seconds": "{0}秒", - "minute": "1分鐘", - "minutes": "{0}分鐘", - "hour": "1小時", - "hours": "{0}小時", - "day": "1天", - "days": "{0}天", - "week": "1週", - "weeks": "{0}週", - "month": "1個月", - "months": "{0}個月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "週一", "週二", "週三", "週四", "週五", "週六", "週日"] - day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class HongKongLocale(Locale): - - names = ["zh_hk"] - - past = "{0}前" - future = "{0}後" - - timeframes = { - "now": "剛才", - "second": "1秒", - "seconds": "{0}秒", - "minute": "1分鐘", - "minutes": "{0}分鐘", - "hour": "1小時", - "hours": "{0}小時", - "day": "1天", - "days": "{0}天", - "week": "1星期", - "weeks": "{0}星期", - "month": "1個月", - "months": "{0}個月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] - 
day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class KoreanLocale(Locale): - - names = ["ko", "ko_kr"] - - past = "{0} 전" - future = "{0} 후" - - timeframes = { - "now": "지금", - "second": "1초", - "seconds": "{0}초", - "minute": "1분", - "minutes": "{0}분", - "hour": "한시간", - "hours": "{0}시간", - "day": "하루", - "days": "{0}일", - "week": "1주", - "weeks": "{0}주", - "month": "한달", - "months": "{0}개월", - "year": "1년", - "years": "{0}년", - } - - special_dayframes = { - -3: "그끄제", - -2: "그제", - -1: "어제", - 1: "내일", - 2: "모레", - 3: "글피", - 4: "그글피", - } - - special_yearframes = {-2: "제작년", -1: "작년", 1: "내년", 2: "내후년"} - - month_names = [ - "", - "1월", - "2월", - "3월", - "4월", - "5월", - "6월", - "7월", - "8월", - "9월", - "10월", - "11월", - "12월", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] - day_abbreviations = ["", "월", "화", "수", "목", "금", "토", "일"] - - def _ordinal_number(self, n): - ordinals = ["0", "첫", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉", "열"] - if n < len(ordinals): - return "{}번째".format(ordinals[n]) - return "{}번째".format(n) - - def _format_relative(self, humanized, timeframe, delta): - if timeframe in ("day", "days"): - special = self.special_dayframes.get(delta) - if special: - return special - elif timeframe in ("year", "years"): - special = self.special_yearframes.get(delta) - if special: - return special - - return super(KoreanLocale, self)._format_relative(humanized, timeframe, delta) - - -# derived locale types & implementations. 
-class DutchLocale(Locale): - - names = ["nl", "nl_nl"] - - past = "{0} geleden" - future = "over {0}" - - timeframes = { - "now": "nu", - "second": "een seconde", - "seconds": "{0} seconden", - "minute": "een minuut", - "minutes": "{0} minuten", - "hour": "een uur", - "hours": "{0} uur", - "day": "een dag", - "days": "{0} dagen", - "week": "een week", - "weeks": "{0} weken", - "month": "een maand", - "months": "{0} maanden", - "year": "een jaar", - "years": "{0} jaar", - } - - # In Dutch names of months and days are not starting with a capital letter - # like in the English language. - month_names = [ - "", - "januari", - "februari", - "maart", - "april", - "mei", - "juni", - "juli", - "augustus", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mrt", - "apr", - "mei", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "maandag", - "dinsdag", - "woensdag", - "donderdag", - "vrijdag", - "zaterdag", - "zondag", - ] - day_abbreviations = ["", "ma", "di", "wo", "do", "vr", "za", "zo"] - - -class SlavicBaseLocale(Locale): - def _format_timeframe(self, timeframe, delta): - - form = self.timeframes[timeframe] - delta = abs(delta) - - if isinstance(form, list): - - if delta % 10 == 1 and delta % 100 != 11: - form = form[0] - elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[1] - else: - form = form[2] - - return form.format(delta) - - -class BelarusianLocale(SlavicBaseLocale): - - names = ["be", "be_by"] - - past = "{0} таму" - future = "праз {0}" - - timeframes = { - "now": "зараз", - "second": "секунду", - "seconds": "{0} некалькі секунд", - "minute": "хвіліну", - "minutes": ["{0} хвіліну", "{0} хвіліны", "{0} хвілін"], - "hour": "гадзіну", - "hours": ["{0} гадзіну", "{0} гадзіны", "{0} гадзін"], - "day": "дзень", - "days": ["{0} дзень", "{0} дні", "{0} дзён"], - "month": "месяц", - "months": ["{0} месяц", "{0} месяцы", "{0} 
месяцаў"], - "year": "год", - "years": ["{0} год", "{0} гады", "{0} гадоў"], - } - - month_names = [ - "", - "студзеня", - "лютага", - "сакавіка", - "красавіка", - "траўня", - "чэрвеня", - "ліпеня", - "жніўня", - "верасня", - "кастрычніка", - "лістапада", - "снежня", - ] - month_abbreviations = [ - "", - "студ", - "лют", - "сак", - "крас", - "трав", - "чэрв", - "ліп", - "жнів", - "вер", - "каст", - "ліст", - "снеж", - ] - - day_names = [ - "", - "панядзелак", - "аўторак", - "серада", - "чацвер", - "пятніца", - "субота", - "нядзеля", - ] - day_abbreviations = ["", "пн", "ат", "ср", "чц", "пт", "сб", "нд"] - - -class PolishLocale(SlavicBaseLocale): - - names = ["pl", "pl_pl"] - - past = "{0} temu" - future = "za {0}" - - # The nouns should be in genitive case (Polish: "dopełniacz") - # in order to correctly form `past` & `future` expressions. - timeframes = { - "now": "teraz", - "second": "sekundę", - "seconds": ["{0} sekund", "{0} sekundy", "{0} sekund"], - "minute": "minutę", - "minutes": ["{0} minut", "{0} minuty", "{0} minut"], - "hour": "godzinę", - "hours": ["{0} godzin", "{0} godziny", "{0} godzin"], - "day": "dzień", - "days": "{0} dni", - "week": "tydzień", - "weeks": ["{0} tygodni", "{0} tygodnie", "{0} tygodni"], - "month": "miesiąc", - "months": ["{0} miesięcy", "{0} miesiące", "{0} miesięcy"], - "year": "rok", - "years": ["{0} lat", "{0} lata", "{0} lat"], - } - - month_names = [ - "", - "styczeń", - "luty", - "marzec", - "kwiecień", - "maj", - "czerwiec", - "lipiec", - "sierpień", - "wrzesień", - "październik", - "listopad", - "grudzień", - ] - month_abbreviations = [ - "", - "sty", - "lut", - "mar", - "kwi", - "maj", - "cze", - "lip", - "sie", - "wrz", - "paź", - "lis", - "gru", - ] - - day_names = [ - "", - "poniedziałek", - "wtorek", - "środa", - "czwartek", - "piątek", - "sobota", - "niedziela", - ] - day_abbreviations = ["", "Pn", "Wt", "Śr", "Czw", "Pt", "So", "Nd"] - - -class RussianLocale(SlavicBaseLocale): - - names = ["ru", "ru_ru"] - - past = 
"{0} назад" - future = "через {0}" - - timeframes = { - "now": "сейчас", - "second": "Второй", - "seconds": "{0} несколько секунд", - "minute": "минуту", - "minutes": ["{0} минуту", "{0} минуты", "{0} минут"], - "hour": "час", - "hours": ["{0} час", "{0} часа", "{0} часов"], - "day": "день", - "days": ["{0} день", "{0} дня", "{0} дней"], - "week": "неделю", - "weeks": ["{0} неделю", "{0} недели", "{0} недель"], - "month": "месяц", - "months": ["{0} месяц", "{0} месяца", "{0} месяцев"], - "year": "год", - "years": ["{0} год", "{0} года", "{0} лет"], - } - - month_names = [ - "", - "января", - "февраля", - "марта", - "апреля", - "мая", - "июня", - "июля", - "августа", - "сентября", - "октября", - "ноября", - "декабря", - ] - month_abbreviations = [ - "", - "янв", - "фев", - "мар", - "апр", - "май", - "июн", - "июл", - "авг", - "сен", - "окт", - "ноя", - "дек", - ] - - day_names = [ - "", - "понедельник", - "вторник", - "среда", - "четверг", - "пятница", - "суббота", - "воскресенье", - ] - day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "вс"] - - -class AfrikaansLocale(Locale): - - names = ["af", "af_nl"] - - past = "{0} gelede" - future = "in {0}" - - timeframes = { - "now": "nou", - "second": "n sekonde", - "seconds": "{0} sekondes", - "minute": "minuut", - "minutes": "{0} minute", - "hour": "uur", - "hours": "{0} ure", - "day": "een dag", - "days": "{0} dae", - "month": "een maand", - "months": "{0} maande", - "year": "een jaar", - "years": "{0} jaar", - } - - month_names = [ - "", - "Januarie", - "Februarie", - "Maart", - "April", - "Mei", - "Junie", - "Julie", - "Augustus", - "September", - "Oktober", - "November", - "Desember", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mrt", - "Apr", - "Mei", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Des", - ] - - day_names = [ - "", - "Maandag", - "Dinsdag", - "Woensdag", - "Donderdag", - "Vrydag", - "Saterdag", - "Sondag", - ] - day_abbreviations = ["", "Ma", "Di", "Wo", "Do", "Vr", 
"Za", "So"] - - -class BulgarianLocale(SlavicBaseLocale): - - names = ["bg", "bg_BG"] - - past = "{0} назад" - future = "напред {0}" - - timeframes = { - "now": "сега", - "second": "секунда", - "seconds": "{0} няколко секунди", - "minute": "минута", - "minutes": ["{0} минута", "{0} минути", "{0} минути"], - "hour": "час", - "hours": ["{0} час", "{0} часа", "{0} часа"], - "day": "ден", - "days": ["{0} ден", "{0} дни", "{0} дни"], - "month": "месец", - "months": ["{0} месец", "{0} месеца", "{0} месеца"], - "year": "година", - "years": ["{0} година", "{0} години", "{0} години"], - } - - month_names = [ - "", - "януари", - "февруари", - "март", - "април", - "май", - "юни", - "юли", - "август", - "септември", - "октомври", - "ноември", - "декември", - ] - month_abbreviations = [ - "", - "ян", - "февр", - "март", - "апр", - "май", - "юни", - "юли", - "авг", - "септ", - "окт", - "ноем", - "дек", - ] - - day_names = [ - "", - "понеделник", - "вторник", - "сряда", - "четвъртък", - "петък", - "събота", - "неделя", - ] - day_abbreviations = ["", "пон", "вт", "ср", "четв", "пет", "съб", "нед"] - - -class UkrainianLocale(SlavicBaseLocale): - - names = ["ua", "uk_ua"] - - past = "{0} тому" - future = "за {0}" - - timeframes = { - "now": "зараз", - "second": "секунда", - "seconds": "{0} кілька секунд", - "minute": "хвилину", - "minutes": ["{0} хвилину", "{0} хвилини", "{0} хвилин"], - "hour": "годину", - "hours": ["{0} годину", "{0} години", "{0} годин"], - "day": "день", - "days": ["{0} день", "{0} дні", "{0} днів"], - "month": "місяць", - "months": ["{0} місяць", "{0} місяці", "{0} місяців"], - "year": "рік", - "years": ["{0} рік", "{0} роки", "{0} років"], - } - - month_names = [ - "", - "січня", - "лютого", - "березня", - "квітня", - "травня", - "червня", - "липня", - "серпня", - "вересня", - "жовтня", - "листопада", - "грудня", - ] - month_abbreviations = [ - "", - "січ", - "лют", - "бер", - "квіт", - "трав", - "черв", - "лип", - "серп", - "вер", - "жовт", - "лист", - 
"груд", - ] - - day_names = [ - "", - "понеділок", - "вівторок", - "середа", - "четвер", - "п’ятниця", - "субота", - "неділя", - ] - day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "нд"] - - -class MacedonianLocale(SlavicBaseLocale): - names = ["mk", "mk_mk"] - - past = "пред {0}" - future = "за {0}" - - timeframes = { - "now": "сега", - "second": "една секунда", - "seconds": ["{0} секунда", "{0} секунди", "{0} секунди"], - "minute": "една минута", - "minutes": ["{0} минута", "{0} минути", "{0} минути"], - "hour": "еден саат", - "hours": ["{0} саат", "{0} саати", "{0} саати"], - "day": "еден ден", - "days": ["{0} ден", "{0} дена", "{0} дена"], - "week": "една недела", - "weeks": ["{0} недела", "{0} недели", "{0} недели"], - "month": "еден месец", - "months": ["{0} месец", "{0} месеци", "{0} месеци"], - "year": "една година", - "years": ["{0} година", "{0} години", "{0} години"], - } - - meridians = {"am": "дп", "pm": "пп", "AM": "претпладне", "PM": "попладне"} - - month_names = [ - "", - "Јануари", - "Февруари", - "Март", - "Април", - "Мај", - "Јуни", - "Јули", - "Август", - "Септември", - "Октомври", - "Ноември", - "Декември", - ] - month_abbreviations = [ - "", - "Јан", - "Фев", - "Мар", - "Апр", - "Мај", - "Јун", - "Јул", - "Авг", - "Септ", - "Окт", - "Ноем", - "Декем", - ] - - day_names = [ - "", - "Понеделник", - "Вторник", - "Среда", - "Четврток", - "Петок", - "Сабота", - "Недела", - ] - day_abbreviations = [ - "", - "Пон", - "Вт", - "Сре", - "Чет", - "Пет", - "Саб", - "Нед", - ] - - -class GermanBaseLocale(Locale): - - past = "vor {0}" - future = "in {0}" - and_word = "und" - - timeframes = { - "now": "gerade eben", - "second": "eine Sekunde", - "seconds": "{0} Sekunden", - "minute": "einer Minute", - "minutes": "{0} Minuten", - "hour": "einer Stunde", - "hours": "{0} Stunden", - "day": "einem Tag", - "days": "{0} Tagen", - "week": "einer Woche", - "weeks": "{0} Wochen", - "month": "einem Monat", - "months": "{0} Monaten", - "year": "einem 
Jahr", - "years": "{0} Jahren", - } - - timeframes_only_distance = timeframes.copy() - timeframes_only_distance["minute"] = "eine Minute" - timeframes_only_distance["hour"] = "eine Stunde" - timeframes_only_distance["day"] = "ein Tag" - timeframes_only_distance["week"] = "eine Woche" - timeframes_only_distance["month"] = "ein Monat" - timeframes_only_distance["year"] = "ein Jahr" - - month_names = [ - "", - "Januar", - "Februar", - "März", - "April", - "Mai", - "Juni", - "Juli", - "August", - "September", - "Oktober", - "November", - "Dezember", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mär", - "Apr", - "Mai", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Dez", - ] - - day_names = [ - "", - "Montag", - "Dienstag", - "Mittwoch", - "Donnerstag", - "Freitag", - "Samstag", - "Sonntag", - ] - - day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] - - def _ordinal_number(self, n): - return "{}.".format(n) - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. 
- :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - if not only_distance: - return super(GermanBaseLocale, self).describe( - timeframe, delta, only_distance - ) - - # German uses a different case without 'in' or 'ago' - humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta))) - - return humanized - - -class GermanLocale(GermanBaseLocale, Locale): - - names = ["de", "de_de"] - - -class SwissLocale(GermanBaseLocale, Locale): - - names = ["de_ch"] - - -class AustrianLocale(GermanBaseLocale, Locale): - - names = ["de_at"] - - month_names = [ - "", - "Jänner", - "Februar", - "März", - "April", - "Mai", - "Juni", - "Juli", - "August", - "September", - "Oktober", - "November", - "Dezember", - ] - - -class NorwegianLocale(Locale): - - names = ["nb", "nb_no"] - - past = "for {0} siden" - future = "om {0}" - - timeframes = { - "now": "nå nettopp", - "second": "et sekund", - "seconds": "{0} noen sekunder", - "minute": "ett minutt", - "minutes": "{0} minutter", - "hour": "en time", - "hours": "{0} timer", - "day": "en dag", - "days": "{0} dager", - "month": "en måned", - "months": "{0} måneder", - "year": "ett år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "mars", - "april", - "mai", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "mai", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "des", - ] - - day_names = [ - "", - "mandag", - "tirsdag", - "onsdag", - "torsdag", - "fredag", - "lørdag", - "søndag", - ] - day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] - - -class NewNorwegianLocale(Locale): - - names = ["nn", "nn_no"] - - past = "for {0} sidan" - future = "om {0}" - - timeframes = { - "now": "no nettopp", - "second": "et sekund", - "seconds": "{0} nokre sekund", - "minute": "ett minutt", - "minutes": "{0} minutt", - "hour": 
"ein time", - "hours": "{0} timar", - "day": "ein dag", - "days": "{0} dagar", - "month": "en månad", - "months": "{0} månader", - "year": "eit år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "mars", - "april", - "mai", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "mai", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "des", - ] - - day_names = [ - "", - "måndag", - "tysdag", - "onsdag", - "torsdag", - "fredag", - "laurdag", - "sundag", - ] - day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] - - -class PortugueseLocale(Locale): - names = ["pt", "pt_pt"] - - past = "há {0}" - future = "em {0}" - and_word = "e" - - timeframes = { - "now": "agora", - "second": "um segundo", - "seconds": "{0} segundos", - "minute": "um minuto", - "minutes": "{0} minutos", - "hour": "uma hora", - "hours": "{0} horas", - "day": "um dia", - "days": "{0} dias", - "week": "uma semana", - "weeks": "{0} semanas", - "month": "um mês", - "months": "{0} meses", - "year": "um ano", - "years": "{0} anos", - } - - month_names = [ - "", - "Janeiro", - "Fevereiro", - "Março", - "Abril", - "Maio", - "Junho", - "Julho", - "Agosto", - "Setembro", - "Outubro", - "Novembro", - "Dezembro", - ] - month_abbreviations = [ - "", - "Jan", - "Fev", - "Mar", - "Abr", - "Mai", - "Jun", - "Jul", - "Ago", - "Set", - "Out", - "Nov", - "Dez", - ] - - day_names = [ - "", - "Segunda-feira", - "Terça-feira", - "Quarta-feira", - "Quinta-feira", - "Sexta-feira", - "Sábado", - "Domingo", - ] - day_abbreviations = ["", "Seg", "Ter", "Qua", "Qui", "Sex", "Sab", "Dom"] - - -class BrazilianPortugueseLocale(PortugueseLocale): - names = ["pt_br"] - - past = "faz {0}" - - -class TagalogLocale(Locale): - - names = ["tl", "tl_ph"] - - past = "nakaraang {0}" - future = "{0} mula ngayon" - - timeframes = { - "now": "ngayon lang", - "second": "isang segundo", - 
"seconds": "{0} segundo", - "minute": "isang minuto", - "minutes": "{0} minuto", - "hour": "isang oras", - "hours": "{0} oras", - "day": "isang araw", - "days": "{0} araw", - "week": "isang linggo", - "weeks": "{0} linggo", - "month": "isang buwan", - "months": "{0} buwan", - "year": "isang taon", - "years": "{0} taon", - } - - month_names = [ - "", - "Enero", - "Pebrero", - "Marso", - "Abril", - "Mayo", - "Hunyo", - "Hulyo", - "Agosto", - "Setyembre", - "Oktubre", - "Nobyembre", - "Disyembre", - ] - month_abbreviations = [ - "", - "Ene", - "Peb", - "Mar", - "Abr", - "May", - "Hun", - "Hul", - "Ago", - "Set", - "Okt", - "Nob", - "Dis", - ] - - day_names = [ - "", - "Lunes", - "Martes", - "Miyerkules", - "Huwebes", - "Biyernes", - "Sabado", - "Linggo", - ] - day_abbreviations = ["", "Lun", "Mar", "Miy", "Huw", "Biy", "Sab", "Lin"] - - meridians = {"am": "nu", "pm": "nh", "AM": "ng umaga", "PM": "ng hapon"} - - def _ordinal_number(self, n): - return "ika-{}".format(n) - - -class VietnameseLocale(Locale): - - names = ["vi", "vi_vn"] - - past = "{0} trước" - future = "{0} nữa" - - timeframes = { - "now": "hiện tại", - "second": "một giây", - "seconds": "{0} giây", - "minute": "một phút", - "minutes": "{0} phút", - "hour": "một giờ", - "hours": "{0} giờ", - "day": "một ngày", - "days": "{0} ngày", - "week": "một tuần", - "weeks": "{0} tuần", - "month": "một tháng", - "months": "{0} tháng", - "year": "một năm", - "years": "{0} năm", - } - - month_names = [ - "", - "Tháng Một", - "Tháng Hai", - "Tháng Ba", - "Tháng Tư", - "Tháng Năm", - "Tháng Sáu", - "Tháng Bảy", - "Tháng Tám", - "Tháng Chín", - "Tháng Mười", - "Tháng Mười Một", - "Tháng Mười Hai", - ] - month_abbreviations = [ - "", - "Tháng 1", - "Tháng 2", - "Tháng 3", - "Tháng 4", - "Tháng 5", - "Tháng 6", - "Tháng 7", - "Tháng 8", - "Tháng 9", - "Tháng 10", - "Tháng 11", - "Tháng 12", - ] - - day_names = [ - "", - "Thứ Hai", - "Thứ Ba", - "Thứ Tư", - "Thứ Năm", - "Thứ Sáu", - "Thứ Bảy", - "Chủ Nhật", - ] - 
day_abbreviations = ["", "Thứ 2", "Thứ 3", "Thứ 4", "Thứ 5", "Thứ 6", "Thứ 7", "CN"] - - -class TurkishLocale(Locale): - - names = ["tr", "tr_tr"] - - past = "{0} önce" - future = "{0} sonra" - - timeframes = { - "now": "şimdi", - "second": "bir saniye", - "seconds": "{0} saniye", - "minute": "bir dakika", - "minutes": "{0} dakika", - "hour": "bir saat", - "hours": "{0} saat", - "day": "bir gün", - "days": "{0} gün", - "month": "bir ay", - "months": "{0} ay", - "year": "yıl", - "years": "{0} yıl", - } - - month_names = [ - "", - "Ocak", - "Şubat", - "Mart", - "Nisan", - "Mayıs", - "Haziran", - "Temmuz", - "Ağustos", - "Eylül", - "Ekim", - "Kasım", - "Aralık", - ] - month_abbreviations = [ - "", - "Oca", - "Şub", - "Mar", - "Nis", - "May", - "Haz", - "Tem", - "Ağu", - "Eyl", - "Eki", - "Kas", - "Ara", - ] - - day_names = [ - "", - "Pazartesi", - "Salı", - "Çarşamba", - "Perşembe", - "Cuma", - "Cumartesi", - "Pazar", - ] - day_abbreviations = ["", "Pzt", "Sal", "Çar", "Per", "Cum", "Cmt", "Paz"] - - -class AzerbaijaniLocale(Locale): - - names = ["az", "az_az"] - - past = "{0} əvvəl" - future = "{0} sonra" - - timeframes = { - "now": "indi", - "second": "saniyə", - "seconds": "{0} saniyə", - "minute": "bir dəqiqə", - "minutes": "{0} dəqiqə", - "hour": "bir saat", - "hours": "{0} saat", - "day": "bir gün", - "days": "{0} gün", - "month": "bir ay", - "months": "{0} ay", - "year": "il", - "years": "{0} il", - } - - month_names = [ - "", - "Yanvar", - "Fevral", - "Mart", - "Aprel", - "May", - "İyun", - "İyul", - "Avqust", - "Sentyabr", - "Oktyabr", - "Noyabr", - "Dekabr", - ] - month_abbreviations = [ - "", - "Yan", - "Fev", - "Mar", - "Apr", - "May", - "İyn", - "İyl", - "Avq", - "Sen", - "Okt", - "Noy", - "Dek", - ] - - day_names = [ - "", - "Bazar ertəsi", - "Çərşənbə axşamı", - "Çərşənbə", - "Cümə axşamı", - "Cümə", - "Şənbə", - "Bazar", - ] - day_abbreviations = ["", "Ber", "Çax", "Çər", "Cax", "Cüm", "Şnb", "Bzr"] - - -class ArabicLocale(Locale): - names = [ - "ar", 
- "ar_ae", - "ar_bh", - "ar_dj", - "ar_eg", - "ar_eh", - "ar_er", - "ar_km", - "ar_kw", - "ar_ly", - "ar_om", - "ar_qa", - "ar_sa", - "ar_sd", - "ar_so", - "ar_ss", - "ar_td", - "ar_ye", - ] - - past = "منذ {0}" - future = "خلال {0}" - - timeframes = { - "now": "الآن", - "second": "ثانية", - "seconds": {"double": "ثانيتين", "ten": "{0} ثوان", "higher": "{0} ثانية"}, - "minute": "دقيقة", - "minutes": {"double": "دقيقتين", "ten": "{0} دقائق", "higher": "{0} دقيقة"}, - "hour": "ساعة", - "hours": {"double": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"}, - "day": "يوم", - "days": {"double": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"}, - "month": "شهر", - "months": {"double": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"}, - "year": "سنة", - "years": {"double": "سنتين", "ten": "{0} سنوات", "higher": "{0} سنة"}, - } - - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "مايو", - "يونيو", - "يوليو", - "أغسطس", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "مايو", - "يونيو", - "يوليو", - "أغسطس", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - - day_names = [ - "", - "الإثنين", - "الثلاثاء", - "الأربعاء", - "الخميس", - "الجمعة", - "السبت", - "الأحد", - ] - day_abbreviations = ["", "إثنين", "ثلاثاء", "أربعاء", "خميس", "جمعة", "سبت", "أحد"] - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - delta = abs(delta) - if isinstance(form, dict): - if delta == 2: - form = form["double"] - elif delta > 2 and delta <= 10: - form = form["ten"] - else: - form = form["higher"] - - return form.format(delta) - - -class LevantArabicLocale(ArabicLocale): - names = ["ar_iq", "ar_jo", "ar_lb", "ar_ps", "ar_sy"] - month_names = [ - "", - "كانون الثاني", - "شباط", - "آذار", - "نيسان", - "أيار", - "حزيران", - "تموز", - "آب", - "أيلول", - "تشرين الأول", - "تشرين الثاني", - "كانون الأول", - ] - month_abbreviations = [ - "", - "كانون الثاني", - 
"شباط", - "آذار", - "نيسان", - "أيار", - "حزيران", - "تموز", - "آب", - "أيلول", - "تشرين الأول", - "تشرين الثاني", - "كانون الأول", - ] - - -class AlgeriaTunisiaArabicLocale(ArabicLocale): - names = ["ar_tn", "ar_dz"] - month_names = [ - "", - "جانفي", - "فيفري", - "مارس", - "أفريل", - "ماي", - "جوان", - "جويلية", - "أوت", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - month_abbreviations = [ - "", - "جانفي", - "فيفري", - "مارس", - "أفريل", - "ماي", - "جوان", - "جويلية", - "أوت", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - - -class MauritaniaArabicLocale(ArabicLocale): - names = ["ar_mr"] - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "إبريل", - "مايو", - "يونيو", - "يوليو", - "أغشت", - "شتمبر", - "أكتوبر", - "نوفمبر", - "دجمبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "إبريل", - "مايو", - "يونيو", - "يوليو", - "أغشت", - "شتمبر", - "أكتوبر", - "نوفمبر", - "دجمبر", - ] - - -class MoroccoArabicLocale(ArabicLocale): - names = ["ar_ma"] - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "ماي", - "يونيو", - "يوليوز", - "غشت", - "شتنبر", - "أكتوبر", - "نونبر", - "دجنبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "ماي", - "يونيو", - "يوليوز", - "غشت", - "شتنبر", - "أكتوبر", - "نونبر", - "دجنبر", - ] - - -class IcelandicLocale(Locale): - def _format_timeframe(self, timeframe, delta): - - timeframe = self.timeframes[timeframe] - if delta < 0: - timeframe = timeframe[0] - elif delta > 0: - timeframe = timeframe[1] - - return timeframe.format(abs(delta)) - - names = ["is", "is_is"] - - past = "fyrir {0} síðan" - future = "eftir {0}" - - timeframes = { - "now": "rétt í þessu", - "second": ("sekúndu", "sekúndu"), - "seconds": ("{0} nokkrum sekúndum", "nokkrar sekúndur"), - "minute": ("einni mínútu", "eina mínútu"), - "minutes": ("{0} mínútum", "{0} mínútur"), - "hour": ("einum tíma", "einn tíma"), - "hours": ("{0} tímum", "{0} tíma"), - "day": ("einum degi", 
"einn dag"), - "days": ("{0} dögum", "{0} daga"), - "month": ("einum mánuði", "einn mánuð"), - "months": ("{0} mánuðum", "{0} mánuði"), - "year": ("einu ári", "eitt ár"), - "years": ("{0} árum", "{0} ár"), - } - - meridians = {"am": "f.h.", "pm": "e.h.", "AM": "f.h.", "PM": "e.h."} - - month_names = [ - "", - "janúar", - "febrúar", - "mars", - "apríl", - "maí", - "júní", - "júlí", - "ágúst", - "september", - "október", - "nóvember", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maí", - "jún", - "júl", - "ágú", - "sep", - "okt", - "nóv", - "des", - ] - - day_names = [ - "", - "mánudagur", - "þriðjudagur", - "miðvikudagur", - "fimmtudagur", - "föstudagur", - "laugardagur", - "sunnudagur", - ] - day_abbreviations = ["", "mán", "þri", "mið", "fim", "fös", "lau", "sun"] - - -class DanishLocale(Locale): - - names = ["da", "da_dk"] - - past = "for {0} siden" - future = "efter {0}" - and_word = "og" - - timeframes = { - "now": "lige nu", - "second": "et sekund", - "seconds": "{0} et par sekunder", - "minute": "et minut", - "minutes": "{0} minutter", - "hour": "en time", - "hours": "{0} timer", - "day": "en dag", - "days": "{0} dage", - "month": "en måned", - "months": "{0} måneder", - "year": "et år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "marts", - "april", - "maj", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "mandag", - "tirsdag", - "onsdag", - "torsdag", - "fredag", - "lørdag", - "søndag", - ] - day_abbreviations = ["", "man", "tir", "ons", "tor", "fre", "lør", "søn"] - - -class MalayalamLocale(Locale): - - names = ["ml"] - - past = "{0} മുമ്പ്" - future = "{0} ശേഷം" - - timeframes = { - "now": "ഇപ്പോൾ", - "second": "ഒരു നിമിഷം", - "seconds": "{0} സെക്കന്റ്‌", - "minute": 
"ഒരു മിനിറ്റ്", - "minutes": "{0} മിനിറ്റ്", - "hour": "ഒരു മണിക്കൂർ", - "hours": "{0} മണിക്കൂർ", - "day": "ഒരു ദിവസം ", - "days": "{0} ദിവസം ", - "month": "ഒരു മാസം ", - "months": "{0} മാസം ", - "year": "ഒരു വർഷം ", - "years": "{0} വർഷം ", - } - - meridians = { - "am": "രാവിലെ", - "pm": "ഉച്ചക്ക് ശേഷം", - "AM": "രാവിലെ", - "PM": "ഉച്ചക്ക് ശേഷം", - } - - month_names = [ - "", - "ജനുവരി", - "ഫെബ്രുവരി", - "മാർച്ച്‌", - "ഏപ്രിൽ ", - "മെയ്‌ ", - "ജൂണ്‍", - "ജൂലൈ", - "ഓഗസ്റ്റ്‌", - "സെപ്റ്റംബർ", - "ഒക്ടോബർ", - "നവംബർ", - "ഡിസംബർ", - ] - month_abbreviations = [ - "", - "ജനു", - "ഫെബ് ", - "മാർ", - "ഏപ്രിൽ", - "മേയ്", - "ജൂണ്‍", - "ജൂലൈ", - "ഓഗസ്റ", - "സെപ്റ്റ", - "ഒക്ടോ", - "നവം", - "ഡിസം", - ] - - day_names = ["", "തിങ്കള്‍", "ചൊവ്വ", "ബുധന്‍", "വ്യാഴം", "വെള്ളി", "ശനി", "ഞായര്‍"] - day_abbreviations = [ - "", - "തിങ്കള്‍", - "ചൊവ്വ", - "ബുധന്‍", - "വ്യാഴം", - "വെള്ളി", - "ശനി", - "ഞായര്‍", - ] - - -class HindiLocale(Locale): - - names = ["hi"] - - past = "{0} पहले" - future = "{0} बाद" - - timeframes = { - "now": "अभी", - "second": "एक पल", - "seconds": "{0} सेकंड्", - "minute": "एक मिनट ", - "minutes": "{0} मिनट ", - "hour": "एक घंटा", - "hours": "{0} घंटे", - "day": "एक दिन", - "days": "{0} दिन", - "month": "एक माह ", - "months": "{0} महीने ", - "year": "एक वर्ष ", - "years": "{0} साल ", - } - - meridians = {"am": "सुबह", "pm": "शाम", "AM": "सुबह", "PM": "शाम"} - - month_names = [ - "", - "जनवरी", - "फरवरी", - "मार्च", - "अप्रैल ", - "मई", - "जून", - "जुलाई", - "अगस्त", - "सितंबर", - "अक्टूबर", - "नवंबर", - "दिसंबर", - ] - month_abbreviations = [ - "", - "जन", - "फ़र", - "मार्च", - "अप्रै", - "मई", - "जून", - "जुलाई", - "आग", - "सित", - "अकत", - "नवे", - "दिस", - ] - - day_names = [ - "", - "सोमवार", - "मंगलवार", - "बुधवार", - "गुरुवार", - "शुक्रवार", - "शनिवार", - "रविवार", - ] - day_abbreviations = ["", "सोम", "मंगल", "बुध", "गुरुवार", "शुक्र", "शनि", "रवि"] - - -class CzechLocale(Locale): - names = ["cs", "cs_cz"] - - timeframes = { - "now": "Teď", - "second": 
{"past": "vteřina", "future": "vteřina", "zero": "vteřina"}, - "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekund"]}, - "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, - "minutes": {"past": "{0} minutami", "future": ["{0} minuty", "{0} minut"]}, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, - "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodin"]}, - "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, - "days": {"past": "{0} dny", "future": ["{0} dny", "{0} dnů"]}, - "week": {"past": "týdnem", "future": "týden", "zero": "{0} týdnů"}, - "weeks": {"past": "{0} týdny", "future": ["{0} týdny", "{0} týdnů"]}, - "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, - "months": {"past": "{0} měsíci", "future": ["{0} měsíce", "{0} měsíců"]}, - "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, - "years": {"past": "{0} lety", "future": ["{0} roky", "{0} let"]}, - } - - past = "Před {0}" - future = "Za {0}" - - month_names = [ - "", - "leden", - "únor", - "březen", - "duben", - "květen", - "červen", - "červenec", - "srpen", - "září", - "říjen", - "listopad", - "prosinec", - ] - month_abbreviations = [ - "", - "led", - "úno", - "bře", - "dub", - "kvě", - "čvn", - "čvc", - "srp", - "zář", - "říj", - "lis", - "pro", - ] - - day_names = [ - "", - "pondělí", - "úterý", - "středa", - "čtvrtek", - "pátek", - "sobota", - "neděle", - ] - day_abbreviations = ["", "po", "út", "st", "čt", "pá", "so", "ne"] - - def _format_timeframe(self, timeframe, delta): - """Czech aware time frame format function, takes into account - the differences between past and future forms.""" - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form["zero"] # And *never* use 0 in the singular! 
- elif delta > 0: - form = form["future"] - else: - form = form["past"] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class SlovakLocale(Locale): - names = ["sk", "sk_sk"] - - timeframes = { - "now": "Teraz", - "second": {"past": "sekundou", "future": "sekundu", "zero": "{0} sekúnd"}, - "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekúnd"]}, - "minute": {"past": "minútou", "future": "minútu", "zero": "{0} minút"}, - "minutes": {"past": "{0} minútami", "future": ["{0} minúty", "{0} minút"]}, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, - "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodín"]}, - "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, - "days": {"past": "{0} dňami", "future": ["{0} dni", "{0} dní"]}, - "week": {"past": "týždňom", "future": "týždeň", "zero": "{0} týždňov"}, - "weeks": {"past": "{0} týždňami", "future": ["{0} týždne", "{0} týždňov"]}, - "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, - "months": {"past": "{0} mesiacmi", "future": ["{0} mesiace", "{0} mesiacov"]}, - "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, - "years": {"past": "{0} rokmi", "future": ["{0} roky", "{0} rokov"]}, - } - - past = "Pred {0}" - future = "O {0}" - and_word = "a" - - month_names = [ - "", - "január", - "február", - "marec", - "apríl", - "máj", - "jún", - "júl", - "august", - "september", - "október", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "máj", - "jún", - "júl", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "pondelok", - "utorok", - "streda", - "štvrtok", - "piatok", - "sobota", - "nedeľa", - ] - day_abbreviations = ["", "po", "ut", "st", "št", "pi", "so", "ne"] - - def _format_timeframe(self, 
timeframe, delta): - """Slovak aware time frame format function, takes into account - the differences between past and future forms.""" - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form["zero"] # And *never* use 0 in the singular! - elif delta > 0: - form = form["future"] - else: - form = form["past"] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class FarsiLocale(Locale): - - names = ["fa", "fa_ir"] - - past = "{0} قبل" - future = "در {0}" - - timeframes = { - "now": "اکنون", - "second": "یک لحظه", - "seconds": "{0} ثانیه", - "minute": "یک دقیقه", - "minutes": "{0} دقیقه", - "hour": "یک ساعت", - "hours": "{0} ساعت", - "day": "یک روز", - "days": "{0} روز", - "month": "یک ماه", - "months": "{0} ماه", - "year": "یک سال", - "years": "{0} سال", - } - - meridians = { - "am": "قبل از ظهر", - "pm": "بعد از ظهر", - "AM": "قبل از ظهر", - "PM": "بعد از ظهر", - } - - month_names = [ - "", - "January", - "February", - "March", - "April", - "May", - "June", - "July", - "August", - "September", - "October", - "November", - "December", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ] - - day_names = [ - "", - "دو شنبه", - "سه شنبه", - "چهارشنبه", - "پنجشنبه", - "جمعه", - "شنبه", - "یکشنبه", - ] - day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] - - -class HebrewLocale(Locale): - - names = ["he", "he_IL"] - - past = "לפני {0}" - future = "בעוד {0}" - and_word = "ו" - - timeframes = { - "now": "הרגע", - "second": "שנייה", - "seconds": "{0} שניות", - "minute": "דקה", - "minutes": "{0} דקות", - "hour": "שעה", - "hours": "{0} שעות", - "2-hours": "שעתיים", - "day": "יום", - "days": "{0} ימים", - "2-days": "יומיים", - "week": "שבוע", - "weeks": "{0} שבועות", - 
"2-weeks": "שבועיים", - "month": "חודש", - "months": "{0} חודשים", - "2-months": "חודשיים", - "year": "שנה", - "years": "{0} שנים", - "2-years": "שנתיים", - } - - meridians = { - "am": 'לפנ"צ', - "pm": 'אחר"צ', - "AM": "לפני הצהריים", - "PM": "אחרי הצהריים", - } - - month_names = [ - "", - "ינואר", - "פברואר", - "מרץ", - "אפריל", - "מאי", - "יוני", - "יולי", - "אוגוסט", - "ספטמבר", - "אוקטובר", - "נובמבר", - "דצמבר", - ] - month_abbreviations = [ - "", - "ינו׳", - "פבר׳", - "מרץ", - "אפר׳", - "מאי", - "יוני", - "יולי", - "אוג׳", - "ספט׳", - "אוק׳", - "נוב׳", - "דצמ׳", - ] - - day_names = ["", "שני", "שלישי", "רביעי", "חמישי", "שישי", "שבת", "ראשון"] - day_abbreviations = ["", "ב׳", "ג׳", "ד׳", "ה׳", "ו׳", "ש׳", "א׳"] - - def _format_timeframe(self, timeframe, delta): - """Hebrew couple of aware""" - couple = "2-{}".format(timeframe) - single = timeframe.rstrip("s") - if abs(delta) == 2 and couple in self.timeframes: - key = couple - elif abs(delta) == 1 and single in self.timeframes: - key = single - else: - key = timeframe - - return self.timeframes[key].format(trunc(abs(delta))) - - def describe_multi(self, timeframes, only_distance=False): - """Describes a delta within multiple timeframes in plain language. - In Hebrew, the and word behaves a bit differently. - - :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
- :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords - """ - - humanized = "" - for index, (timeframe, delta) in enumerate(timeframes): - last_humanized = self._format_timeframe(timeframe, delta) - if index == 0: - humanized = last_humanized - elif index == len(timeframes) - 1: # Must have at least 2 items - humanized += " " + self.and_word - if last_humanized[0].isdecimal(): - humanized += "־" - humanized += last_humanized - else: # Don't add for the last one - humanized += ", " + last_humanized - - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - -class MarathiLocale(Locale): - - names = ["mr"] - - past = "{0} आधी" - future = "{0} नंतर" - - timeframes = { - "now": "सद्य", - "second": "एक सेकंद", - "seconds": "{0} सेकंद", - "minute": "एक मिनिट ", - "minutes": "{0} मिनिट ", - "hour": "एक तास", - "hours": "{0} तास", - "day": "एक दिवस", - "days": "{0} दिवस", - "month": "एक महिना ", - "months": "{0} महिने ", - "year": "एक वर्ष ", - "years": "{0} वर्ष ", - } - - meridians = {"am": "सकाळ", "pm": "संध्याकाळ", "AM": "सकाळ", "PM": "संध्याकाळ"} - - month_names = [ - "", - "जानेवारी", - "फेब्रुवारी", - "मार्च", - "एप्रिल", - "मे", - "जून", - "जुलै", - "अॉगस्ट", - "सप्टेंबर", - "अॉक्टोबर", - "नोव्हेंबर", - "डिसेंबर", - ] - month_abbreviations = [ - "", - "जान", - "फेब्रु", - "मार्च", - "एप्रि", - "मे", - "जून", - "जुलै", - "अॉग", - "सप्टें", - "अॉक्टो", - "नोव्हें", - "डिसें", - ] - - day_names = [ - "", - "सोमवार", - "मंगळवार", - "बुधवार", - "गुरुवार", - "शुक्रवार", - "शनिवार", - "रविवार", - ] - day_abbreviations = ["", "सोम", "मंगळ", "बुध", "गुरु", "शुक्र", "शनि", "रवि"] - - -def _map_locales(): - - locales = {} - - for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): - if issubclass(cls, Locale): # pragma: no branch - for name in cls.names: - locales[name.lower()] = cls - - return locales - - -class CatalanLocale(Locale): - names = ["ca", 
"ca_es", "ca_ad", "ca_fr", "ca_it"] - past = "Fa {0}" - future = "En {0}" - and_word = "i" - - timeframes = { - "now": "Ara mateix", - "second": "un segon", - "seconds": "{0} segons", - "minute": "1 minut", - "minutes": "{0} minuts", - "hour": "una hora", - "hours": "{0} hores", - "day": "un dia", - "days": "{0} dies", - "month": "un mes", - "months": "{0} mesos", - "year": "un any", - "years": "{0} anys", - } - - month_names = [ - "", - "gener", - "febrer", - "març", - "abril", - "maig", - "juny", - "juliol", - "agost", - "setembre", - "octubre", - "novembre", - "desembre", - ] - month_abbreviations = [ - "", - "gen.", - "febr.", - "març", - "abr.", - "maig", - "juny", - "jul.", - "ag.", - "set.", - "oct.", - "nov.", - "des.", - ] - day_names = [ - "", - "dilluns", - "dimarts", - "dimecres", - "dijous", - "divendres", - "dissabte", - "diumenge", - ] - day_abbreviations = [ - "", - "dl.", - "dt.", - "dc.", - "dj.", - "dv.", - "ds.", - "dg.", - ] - - -class BasqueLocale(Locale): - names = ["eu", "eu_eu"] - past = "duela {0}" - future = "{0}" # I don't know what's the right phrase in Basque for the future. 
- - timeframes = { - "now": "Orain", - "second": "segundo bat", - "seconds": "{0} segundu", - "minute": "minutu bat", - "minutes": "{0} minutu", - "hour": "ordu bat", - "hours": "{0} ordu", - "day": "egun bat", - "days": "{0} egun", - "month": "hilabete bat", - "months": "{0} hilabet", - "year": "urte bat", - "years": "{0} urte", - } - - month_names = [ - "", - "urtarrilak", - "otsailak", - "martxoak", - "apirilak", - "maiatzak", - "ekainak", - "uztailak", - "abuztuak", - "irailak", - "urriak", - "azaroak", - "abenduak", - ] - month_abbreviations = [ - "", - "urt", - "ots", - "mar", - "api", - "mai", - "eka", - "uzt", - "abu", - "ira", - "urr", - "aza", - "abe", - ] - day_names = [ - "", - "astelehena", - "asteartea", - "asteazkena", - "osteguna", - "ostirala", - "larunbata", - "igandea", - ] - day_abbreviations = ["", "al", "ar", "az", "og", "ol", "lr", "ig"] - - -class HungarianLocale(Locale): - - names = ["hu", "hu_hu"] - - past = "{0} ezelőtt" - future = "{0} múlva" - - timeframes = { - "now": "éppen most", - "second": {"past": "egy második", "future": "egy második"}, - "seconds": {"past": "{0} másodpercekkel", "future": "{0} pár másodperc"}, - "minute": {"past": "egy perccel", "future": "egy perc"}, - "minutes": {"past": "{0} perccel", "future": "{0} perc"}, - "hour": {"past": "egy órával", "future": "egy óra"}, - "hours": {"past": "{0} órával", "future": "{0} óra"}, - "day": {"past": "egy nappal", "future": "egy nap"}, - "days": {"past": "{0} nappal", "future": "{0} nap"}, - "month": {"past": "egy hónappal", "future": "egy hónap"}, - "months": {"past": "{0} hónappal", "future": "{0} hónap"}, - "year": {"past": "egy évvel", "future": "egy év"}, - "years": {"past": "{0} évvel", "future": "{0} év"}, - } - - month_names = [ - "", - "január", - "február", - "március", - "április", - "május", - "június", - "július", - "augusztus", - "szeptember", - "október", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "febr", - "márc", - "ápr", - 
"máj", - "jún", - "júl", - "aug", - "szept", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "hétfő", - "kedd", - "szerda", - "csütörtök", - "péntek", - "szombat", - "vasárnap", - ] - day_abbreviations = ["", "hét", "kedd", "szer", "csüt", "pént", "szom", "vas"] - - meridians = {"am": "de", "pm": "du", "AM": "DE", "PM": "DU"} - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - - if isinstance(form, dict): - if delta > 0: - form = form["future"] - else: - form = form["past"] - - return form.format(abs(delta)) - - -class EsperantoLocale(Locale): - names = ["eo", "eo_xx"] - past = "antaŭ {0}" - future = "post {0}" - - timeframes = { - "now": "nun", - "second": "sekundo", - "seconds": "{0} kelkaj sekundoj", - "minute": "unu minuto", - "minutes": "{0} minutoj", - "hour": "un horo", - "hours": "{0} horoj", - "day": "unu tago", - "days": "{0} tagoj", - "month": "unu monato", - "months": "{0} monatoj", - "year": "unu jaro", - "years": "{0} jaroj", - } - - month_names = [ - "", - "januaro", - "februaro", - "marto", - "aprilo", - "majo", - "junio", - "julio", - "aŭgusto", - "septembro", - "oktobro", - "novembro", - "decembro", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aŭg", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "lundo", - "mardo", - "merkredo", - "ĵaŭdo", - "vendredo", - "sabato", - "dimanĉo", - ] - day_abbreviations = ["", "lun", "mar", "mer", "ĵaŭ", "ven", "sab", "dim"] - - meridians = {"am": "atm", "pm": "ptm", "AM": "ATM", "PM": "PTM"} - - ordinal_day_re = r"((?P[1-3]?[0-9](?=a))a)" - - def _ordinal_number(self, n): - return "{}a".format(n) - - -class ThaiLocale(Locale): - - names = ["th", "th_th"] - - past = "{0}{1}ที่ผ่านมา" - future = "ในอีก{1}{0}" - - timeframes = { - "now": "ขณะนี้", - "second": "วินาที", - "seconds": "{0} ไม่กี่วินาที", - "minute": "1 นาที", - "minutes": "{0} นาที", - "hour": "1 ชั่วโมง", - "hours": "{0} ชั่วโมง", - 
"day": "1 วัน", - "days": "{0} วัน", - "month": "1 เดือน", - "months": "{0} เดือน", - "year": "1 ปี", - "years": "{0} ปี", - } - - month_names = [ - "", - "มกราคม", - "กุมภาพันธ์", - "มีนาคม", - "เมษายน", - "พฤษภาคม", - "มิถุนายน", - "กรกฎาคม", - "สิงหาคม", - "กันยายน", - "ตุลาคม", - "พฤศจิกายน", - "ธันวาคม", - ] - month_abbreviations = [ - "", - "ม.ค.", - "ก.พ.", - "มี.ค.", - "เม.ย.", - "พ.ค.", - "มิ.ย.", - "ก.ค.", - "ส.ค.", - "ก.ย.", - "ต.ค.", - "พ.ย.", - "ธ.ค.", - ] - - day_names = ["", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์", "อาทิตย์"] - day_abbreviations = ["", "จ", "อ", "พ", "พฤ", "ศ", "ส", "อา"] - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - BE_OFFSET = 543 - - def year_full(self, year): - """Thai always use Buddhist Era (BE) which is CE + 543""" - year += self.BE_OFFSET - return "{:04d}".format(year) - - def year_abbreviation(self, year): - """Thai always use Buddhist Era (BE) which is CE + 543""" - year += self.BE_OFFSET - return "{:04d}".format(year)[2:] - - def _format_relative(self, humanized, timeframe, delta): - """Thai normally doesn't have any space between words""" - if timeframe == "now": - return humanized - space = "" if timeframe == "seconds" else " " - direction = self.past if delta < 0 else self.future - - return direction.format(humanized, space) - - -class BengaliLocale(Locale): - - names = ["bn", "bn_bd", "bn_in"] - - past = "{0} আগে" - future = "{0} পরে" - - timeframes = { - "now": "এখন", - "second": "একটি দ্বিতীয়", - "seconds": "{0} সেকেন্ড", - "minute": "এক মিনিট", - "minutes": "{0} মিনিট", - "hour": "এক ঘণ্টা", - "hours": "{0} ঘণ্টা", - "day": "এক দিন", - "days": "{0} দিন", - "month": "এক মাস", - "months": "{0} মাস ", - "year": "এক বছর", - "years": "{0} বছর", - } - - meridians = {"am": "সকাল", "pm": "বিকাল", "AM": "সকাল", "PM": "বিকাল"} - - month_names = [ - "", - "জানুয়ারি", - "ফেব্রুয়ারি", - "মার্চ", - "এপ্রিল", - "মে", - "জুন", - "জুলাই", - "আগস্ট", - "সেপ্টেম্বর", - "অক্টোবর", - "নভেম্বর", 
- "ডিসেম্বর", - ] - month_abbreviations = [ - "", - "জানু", - "ফেব", - "মার্চ", - "এপ্রি", - "মে", - "জুন", - "জুল", - "অগা", - "সেপ্ট", - "অক্টো", - "নভে", - "ডিসে", - ] - - day_names = [ - "", - "সোমবার", - "মঙ্গলবার", - "বুধবার", - "বৃহস্পতিবার", - "শুক্রবার", - "শনিবার", - "রবিবার", - ] - day_abbreviations = ["", "সোম", "মঙ্গল", "বুধ", "বৃহঃ", "শুক্র", "শনি", "রবি"] - - def _ordinal_number(self, n): - if n > 10 or n == 0: - return "{}তম".format(n) - if n in [1, 5, 7, 8, 9, 10]: - return "{}ম".format(n) - if n in [2, 3]: - return "{}য়".format(n) - if n == 4: - return "{}র্থ".format(n) - if n == 6: - return "{}ষ্ঠ".format(n) - - -class RomanshLocale(Locale): - - names = ["rm", "rm_ch"] - - past = "avant {0}" - future = "en {0}" - - timeframes = { - "now": "en quest mument", - "second": "in secunda", - "seconds": "{0} secundas", - "minute": "ina minuta", - "minutes": "{0} minutas", - "hour": "in'ura", - "hours": "{0} ura", - "day": "in di", - "days": "{0} dis", - "month": "in mais", - "months": "{0} mais", - "year": "in onn", - "years": "{0} onns", - } - - month_names = [ - "", - "schaner", - "favrer", - "mars", - "avrigl", - "matg", - "zercladur", - "fanadur", - "avust", - "settember", - "october", - "november", - "december", - ] - - month_abbreviations = [ - "", - "schan", - "fav", - "mars", - "avr", - "matg", - "zer", - "fan", - "avu", - "set", - "oct", - "nov", - "dec", - ] - - day_names = [ - "", - "glindesdi", - "mardi", - "mesemna", - "gievgia", - "venderdi", - "sonda", - "dumengia", - ] - - day_abbreviations = ["", "gli", "ma", "me", "gie", "ve", "so", "du"] - - -class RomanianLocale(Locale): - names = ["ro", "ro_ro"] - - past = "{0} în urmă" - future = "peste {0}" - and_word = "și" - - timeframes = { - "now": "acum", - "second": "o secunda", - "seconds": "{0} câteva secunde", - "minute": "un minut", - "minutes": "{0} minute", - "hour": "o oră", - "hours": "{0} ore", - "day": "o zi", - "days": "{0} zile", - "month": "o lună", - "months": "{0} luni", - 
"year": "un an", - "years": "{0} ani", - } - - month_names = [ - "", - "ianuarie", - "februarie", - "martie", - "aprilie", - "mai", - "iunie", - "iulie", - "august", - "septembrie", - "octombrie", - "noiembrie", - "decembrie", - ] - month_abbreviations = [ - "", - "ian", - "febr", - "mart", - "apr", - "mai", - "iun", - "iul", - "aug", - "sept", - "oct", - "nov", - "dec", - ] - - day_names = [ - "", - "luni", - "marți", - "miercuri", - "joi", - "vineri", - "sâmbătă", - "duminică", - ] - day_abbreviations = ["", "Lun", "Mar", "Mie", "Joi", "Vin", "Sâm", "Dum"] - - -class SlovenianLocale(Locale): - names = ["sl", "sl_si"] - - past = "pred {0}" - future = "čez {0}" - and_word = "in" - - timeframes = { - "now": "zdaj", - "second": "sekundo", - "seconds": "{0} sekund", - "minute": "minuta", - "minutes": "{0} minutami", - "hour": "uro", - "hours": "{0} ur", - "day": "dan", - "days": "{0} dni", - "month": "mesec", - "months": "{0} mesecev", - "year": "leto", - "years": "{0} let", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - month_names = [ - "", - "Januar", - "Februar", - "Marec", - "April", - "Maj", - "Junij", - "Julij", - "Avgust", - "September", - "Oktober", - "November", - "December", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "Maj", - "Jun", - "Jul", - "Avg", - "Sep", - "Okt", - "Nov", - "Dec", - ] - - day_names = [ - "", - "Ponedeljek", - "Torek", - "Sreda", - "Četrtek", - "Petek", - "Sobota", - "Nedelja", - ] - - day_abbreviations = ["", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob", "Ned"] - - -class IndonesianLocale(Locale): - - names = ["id", "id_id"] - - past = "{0} yang lalu" - future = "dalam {0}" - and_word = "dan" - - timeframes = { - "now": "baru saja", - "second": "1 sebentar", - "seconds": "{0} detik", - "minute": "1 menit", - "minutes": "{0} menit", - "hour": "1 jam", - "hours": "{0} jam", - "day": "1 hari", - "days": "{0} hari", - "month": "1 bulan", - "months": "{0} bulan", - "year": "1 tahun", - "years": 
"{0} tahun", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - month_names = [ - "", - "Januari", - "Februari", - "Maret", - "April", - "Mei", - "Juni", - "Juli", - "Agustus", - "September", - "Oktober", - "November", - "Desember", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "Mei", - "Jun", - "Jul", - "Ags", - "Sept", - "Okt", - "Nov", - "Des", - ] - - day_names = ["", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu", "Minggu"] - - day_abbreviations = [ - "", - "Senin", - "Selasa", - "Rabu", - "Kamis", - "Jumat", - "Sabtu", - "Minggu", - ] - - -class NepaliLocale(Locale): - names = ["ne", "ne_np"] - - past = "{0} पहिले" - future = "{0} पछी" - - timeframes = { - "now": "अहिले", - "second": "एक सेकेन्ड", - "seconds": "{0} सेकण्ड", - "minute": "मिनेट", - "minutes": "{0} मिनेट", - "hour": "एक घण्टा", - "hours": "{0} घण्टा", - "day": "एक दिन", - "days": "{0} दिन", - "month": "एक महिना", - "months": "{0} महिना", - "year": "एक बर्ष", - "years": "बर्ष", - } - - meridians = {"am": "पूर्वाह्न", "pm": "अपरान्ह", "AM": "पूर्वाह्न", "PM": "अपरान्ह"} - - month_names = [ - "", - "जनवरी", - "फेब्रुअरी", - "मार्च", - "एप्रील", - "मे", - "जुन", - "जुलाई", - "अगष्ट", - "सेप्टेम्बर", - "अक्टोबर", - "नोवेम्बर", - "डिसेम्बर", - ] - month_abbreviations = [ - "", - "जन", - "फेब", - "मार्च", - "एप्रील", - "मे", - "जुन", - "जुलाई", - "अग", - "सेप", - "अक्ट", - "नोव", - "डिस", - ] - - day_names = [ - "", - "सोमवार", - "मंगलवार", - "बुधवार", - "बिहिवार", - "शुक्रवार", - "शनिवार", - "आइतवार", - ] - - day_abbreviations = ["", "सोम", "मंगल", "बुध", "बिहि", "शुक्र", "शनि", "आइत"] - - -class EstonianLocale(Locale): - names = ["ee", "et"] - - past = "{0} tagasi" - future = "{0} pärast" - and_word = "ja" - - timeframes = { - "now": {"past": "just nüüd", "future": "just nüüd"}, - "second": {"past": "üks sekund", "future": "ühe sekundi"}, - "seconds": {"past": "{0} sekundit", "future": "{0} sekundi"}, - "minute": {"past": "üks minut", "future": "ühe 
minuti"}, - "minutes": {"past": "{0} minutit", "future": "{0} minuti"}, - "hour": {"past": "tund aega", "future": "tunni aja"}, - "hours": {"past": "{0} tundi", "future": "{0} tunni"}, - "day": {"past": "üks päev", "future": "ühe päeva"}, - "days": {"past": "{0} päeva", "future": "{0} päeva"}, - "month": {"past": "üks kuu", "future": "ühe kuu"}, - "months": {"past": "{0} kuud", "future": "{0} kuu"}, - "year": {"past": "üks aasta", "future": "ühe aasta"}, - "years": {"past": "{0} aastat", "future": "{0} aasta"}, - } - - month_names = [ - "", - "Jaanuar", - "Veebruar", - "Märts", - "Aprill", - "Mai", - "Juuni", - "Juuli", - "August", - "September", - "Oktoober", - "November", - "Detsember", - ] - month_abbreviations = [ - "", - "Jan", - "Veb", - "Mär", - "Apr", - "Mai", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Dets", - ] - - day_names = [ - "", - "Esmaspäev", - "Teisipäev", - "Kolmapäev", - "Neljapäev", - "Reede", - "Laupäev", - "Pühapäev", - ] - day_abbreviations = ["", "Esm", "Teis", "Kolm", "Nelj", "Re", "Lau", "Püh"] - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - if delta > 0: - form = form["future"] - else: - form = form["past"] - return form.format(abs(delta)) - - -class SwahiliLocale(Locale): - - names = [ - "sw", - "sw_ke", - "sw_tz", - ] - - past = "{0} iliyopita" - future = "muda wa {0}" - and_word = "na" - - timeframes = { - "now": "sasa hivi", - "second": "sekunde", - "seconds": "sekunde {0}", - "minute": "dakika moja", - "minutes": "dakika {0}", - "hour": "saa moja", - "hours": "saa {0}", - "day": "siku moja", - "days": "siku {0}", - "week": "wiki moja", - "weeks": "wiki {0}", - "month": "mwezi moja", - "months": "miezi {0}", - "year": "mwaka moja", - "years": "miaka {0}", - } - - meridians = {"am": "asu", "pm": "mch", "AM": "ASU", "PM": "MCH"} - - month_names = [ - "", - "Januari", - "Februari", - "Machi", - "Aprili", - "Mei", - "Juni", - "Julai", - "Agosti", - "Septemba", - "Oktoba", - 
"Novemba", - "Desemba", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mac", - "Apr", - "Mei", - "Jun", - "Jul", - "Ago", - "Sep", - "Okt", - "Nov", - "Des", - ] - - day_names = [ - "", - "Jumatatu", - "Jumanne", - "Jumatano", - "Alhamisi", - "Ijumaa", - "Jumamosi", - "Jumapili", - ] - day_abbreviations = [ - "", - "Jumatatu", - "Jumanne", - "Jumatano", - "Alhamisi", - "Ijumaa", - "Jumamosi", - "Jumapili", - ] - - -_locales = _map_locales() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py deleted file mode 100644 index 243fd1721c4..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py +++ /dev/null @@ -1,596 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import re -from datetime import datetime, timedelta - -from dateutil import tz - -from arrow import locales -from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp - -try: - from functools import lru_cache -except ImportError: # pragma: no cover - from backports.functools_lru_cache import lru_cache # pragma: no cover - - -class ParserError(ValueError): - pass - - -# Allows for ParserErrors to be propagated from _build_datetime() -# when day_of_year errors occur. -# Before this, the ParserErrors were caught by the try/except in -# _parse_multiformat() and the appropriate error message was not -# transmitted to the user. 
-class ParserMatchError(ParserError): - pass - - -class DateTimeParser(object): - - _FORMAT_RE = re.compile( - r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)" - ) - _ESCAPE_RE = re.compile(r"\[[^\[\]]*\]") - - _ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}") - _ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}") - _ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+") - _TWO_DIGIT_RE = re.compile(r"\d{2}") - _THREE_DIGIT_RE = re.compile(r"\d{3}") - _FOUR_DIGIT_RE = re.compile(r"\d{4}") - _TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z") - _TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z") - _TZ_NAME_RE = re.compile(r"\w[\w+\-/]+") - # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will - # break cases like "15 Jul 2000" and a format list (see issue #447) - _TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$") - _TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$") - _TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$") - _WEEK_DATE_RE = re.compile(r"(?P\d{4})[\-]?W(?P\d{2})[\-]?(?P\d)?") - - _BASE_INPUT_RE_MAP = { - "YYYY": _FOUR_DIGIT_RE, - "YY": _TWO_DIGIT_RE, - "MM": _TWO_DIGIT_RE, - "M": _ONE_OR_TWO_DIGIT_RE, - "DDDD": _THREE_DIGIT_RE, - "DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE, - "DD": _TWO_DIGIT_RE, - "D": _ONE_OR_TWO_DIGIT_RE, - "HH": _TWO_DIGIT_RE, - "H": _ONE_OR_TWO_DIGIT_RE, - "hh": _TWO_DIGIT_RE, - "h": _ONE_OR_TWO_DIGIT_RE, - "mm": _TWO_DIGIT_RE, - "m": _ONE_OR_TWO_DIGIT_RE, - "ss": _TWO_DIGIT_RE, - "s": _ONE_OR_TWO_DIGIT_RE, - "X": _TIMESTAMP_RE, - "x": _TIMESTAMP_EXPANDED_RE, - "ZZZ": _TZ_NAME_RE, - "ZZ": _TZ_ZZ_RE, - "Z": _TZ_Z_RE, - "S": _ONE_OR_MORE_DIGIT_RE, - "W": _WEEK_DATE_RE, - } - - SEPARATORS = ["-", "/", "."] - - def __init__(self, locale="en_us", cache_size=0): - - self.locale = locales.get_locale(locale) - self._input_re_map = self._BASE_INPUT_RE_MAP.copy() - self._input_re_map.update( - { - "MMMM": self._generate_choice_re( - 
self.locale.month_names[1:], re.IGNORECASE - ), - "MMM": self._generate_choice_re( - self.locale.month_abbreviations[1:], re.IGNORECASE - ), - "Do": re.compile(self.locale.ordinal_day_re), - "dddd": self._generate_choice_re( - self.locale.day_names[1:], re.IGNORECASE - ), - "ddd": self._generate_choice_re( - self.locale.day_abbreviations[1:], re.IGNORECASE - ), - "d": re.compile(r"[1-7]"), - "a": self._generate_choice_re( - (self.locale.meridians["am"], self.locale.meridians["pm"]) - ), - # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to - # ensure backwards compatibility of this token - "A": self._generate_choice_re(self.locale.meridians.values()), - } - ) - if cache_size > 0: - self._generate_pattern_re = lru_cache(maxsize=cache_size)( - self._generate_pattern_re - ) - - # TODO: since we support more than ISO 8601, we should rename this function - # IDEA: break into multiple functions - def parse_iso(self, datetime_string, normalize_whitespace=False): - - if normalize_whitespace: - datetime_string = re.sub(r"\s+", " ", datetime_string.strip()) - - has_space_divider = " " in datetime_string - has_t_divider = "T" in datetime_string - - num_spaces = datetime_string.count(" ") - if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0: - raise ParserError( - "Expected an ISO 8601-like string, but was given '{}'. 
Try passing in a format string to resolve this.".format( - datetime_string - ) - ) - - has_time = has_space_divider or has_t_divider - has_tz = False - - # date formats (ISO 8601 and others) to test against - # NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used) - formats = [ - "YYYY-MM-DD", - "YYYY-M-DD", - "YYYY-M-D", - "YYYY/MM/DD", - "YYYY/M/DD", - "YYYY/M/D", - "YYYY.MM.DD", - "YYYY.M.DD", - "YYYY.M.D", - "YYYYMMDD", - "YYYY-DDDD", - "YYYYDDDD", - "YYYY-MM", - "YYYY/MM", - "YYYY.MM", - "YYYY", - "W", - ] - - if has_time: - - if has_space_divider: - date_string, time_string = datetime_string.split(" ", 1) - else: - date_string, time_string = datetime_string.split("T", 1) - - time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE) - - time_components = self._TIME_RE.match(time_parts[0]) - - if time_components is None: - raise ParserError( - "Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format." 
- ) - - ( - hours, - minutes, - seconds, - subseconds_sep, - subseconds, - ) = time_components.groups() - - has_tz = len(time_parts) == 2 - has_minutes = minutes is not None - has_seconds = seconds is not None - has_subseconds = subseconds is not None - - is_basic_time_format = ":" not in time_parts[0] - tz_format = "Z" - - # use 'ZZ' token instead since tz offset is present in non-basic format - if has_tz and ":" in time_parts[1]: - tz_format = "ZZ" - - time_sep = "" if is_basic_time_format else ":" - - if has_subseconds: - time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format( - time_sep=time_sep, subseconds_sep=subseconds_sep - ) - elif has_seconds: - time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep) - elif has_minutes: - time_string = "HH{time_sep}mm".format(time_sep=time_sep) - else: - time_string = "HH" - - if has_space_divider: - formats = ["{} {}".format(f, time_string) for f in formats] - else: - formats = ["{}T{}".format(f, time_string) for f in formats] - - if has_time and has_tz: - # Add "Z" or "ZZ" to the format strings to indicate to - # _parse_token() that a timezone needs to be parsed - formats = ["{}{}".format(f, tz_format) for f in formats] - - return self._parse_multiformat(datetime_string, formats) - - def parse(self, datetime_string, fmt, normalize_whitespace=False): - - if normalize_whitespace: - datetime_string = re.sub(r"\s+", " ", datetime_string) - - if isinstance(fmt, list): - return self._parse_multiformat(datetime_string, fmt) - - fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) - - match = fmt_pattern_re.search(datetime_string) - - if match is None: - raise ParserMatchError( - "Failed to match '{}' when parsing '{}'".format(fmt, datetime_string) - ) - - parts = {} - for token in fmt_tokens: - if token == "Do": - value = match.group("value") - elif token == "W": - value = (match.group("year"), match.group("week"), match.group("day")) - else: - value = match.group(token) - 
self._parse_token(token, value, parts) - - return self._build_datetime(parts) - - def _generate_pattern_re(self, fmt): - - # fmt is a string of tokens like 'YYYY-MM-DD' - # we construct a new string by replacing each - # token by its pattern: - # 'YYYY-MM-DD' -> '(?P\d{4})-(?P\d{2})-(?P
\d{2})' - tokens = [] - offset = 0 - - # Escape all special RegEx chars - escaped_fmt = re.escape(fmt) - - # Extract the bracketed expressions to be reinserted later. - escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt) - - # Any number of S is the same as one. - # TODO: allow users to specify the number of digits to parse - escaped_fmt = re.sub(r"S+", "S", escaped_fmt) - - escaped_data = re.findall(self._ESCAPE_RE, fmt) - - fmt_pattern = escaped_fmt - - for m in self._FORMAT_RE.finditer(escaped_fmt): - token = m.group(0) - try: - input_re = self._input_re_map[token] - except KeyError: - raise ParserError("Unrecognized token '{}'".format(token)) - input_pattern = "(?P<{}>{})".format(token, input_re.pattern) - tokens.append(token) - # a pattern doesn't have the same length as the token - # it replaces! We keep the difference in the offset variable. - # This works because the string is scanned left-to-right and matches - # are returned in the order found by finditer. - fmt_pattern = ( - fmt_pattern[: m.start() + offset] - + input_pattern - + fmt_pattern[m.end() + offset :] - ) - offset += len(input_pattern) - (m.end() - m.start()) - - final_fmt_pattern = "" - split_fmt = fmt_pattern.split(r"\#") - - # Due to the way Python splits, 'split_fmt' will always be longer - for i in range(len(split_fmt)): - final_fmt_pattern += split_fmt[i] - if i < len(escaped_data): - final_fmt_pattern += escaped_data[i][1:-1] - - # Wrap final_fmt_pattern in a custom word boundary to strictly - # match the formatting pattern and filter out date and time formats - # that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah, - # blah1998-09-12blah. The custom word boundary matches every character - # that is not a whitespace character to allow for searching for a date - # and time string in a natural language sentence. Therefore, searching - # for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will - # work properly. 
- # Certain punctuation before or after the target pattern such as - # "1998-09-12," is permitted. For the full list of valid punctuation, - # see the documentation. - - starting_word_boundary = ( - r"(?\s])" # This is the list of punctuation that is ok before the pattern (i.e. "It can't not be these characters before the pattern") - r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers - ) - ending_word_boundary = ( - r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time - r"(?!\S))" # Don't allow any non-whitespace character after the punctuation - ) - bounded_fmt_pattern = r"{}{}{}".format( - starting_word_boundary, final_fmt_pattern, ending_word_boundary - ) - - return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE) - - def _parse_token(self, token, value, parts): - - if token == "YYYY": - parts["year"] = int(value) - - elif token == "YY": - value = int(value) - parts["year"] = 1900 + value if value > 68 else 2000 + value - - elif token in ["MMMM", "MMM"]: - parts["month"] = self.locale.month_number(value.lower()) - - elif token in ["MM", "M"]: - parts["month"] = int(value) - - elif token in ["DDDD", "DDD"]: - parts["day_of_year"] = int(value) - - elif token in ["DD", "D"]: - parts["day"] = int(value) - - elif token == "Do": - parts["day"] = int(value) - - elif token == "dddd": - # locale day names are 1-indexed - day_of_week = [x.lower() for x in self.locale.day_names].index( - value.lower() - ) - parts["day_of_week"] = day_of_week - 1 - - elif token == "ddd": - # locale day abbreviations are 1-indexed - day_of_week = [x.lower() for x in self.locale.day_abbreviations].index( - value.lower() - ) - parts["day_of_week"] = day_of_week - 1 - - elif token.upper() in ["HH", "H"]: - parts["hour"] = int(value) - - elif token in ["mm", "m"]: - 
parts["minute"] = int(value) - - elif token in ["ss", "s"]: - parts["second"] = int(value) - - elif token == "S": - # We have the *most significant* digits of an arbitrary-precision integer. - # We want the six most significant digits as an integer, rounded. - # IDEA: add nanosecond support somehow? Need datetime support for it first. - value = value.ljust(7, str("0")) - - # floating-point (IEEE-754) defaults to half-to-even rounding - seventh_digit = int(value[6]) - if seventh_digit == 5: - rounding = int(value[5]) % 2 - elif seventh_digit > 5: - rounding = 1 - else: - rounding = 0 - - parts["microsecond"] = int(value[:6]) + rounding - - elif token == "X": - parts["timestamp"] = float(value) - - elif token == "x": - parts["expanded_timestamp"] = int(value) - - elif token in ["ZZZ", "ZZ", "Z"]: - parts["tzinfo"] = TzinfoParser.parse(value) - - elif token in ["a", "A"]: - if value in (self.locale.meridians["am"], self.locale.meridians["AM"]): - parts["am_pm"] = "am" - elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]): - parts["am_pm"] = "pm" - - elif token == "W": - parts["weekdate"] = value - - @staticmethod - def _build_datetime(parts): - - weekdate = parts.get("weekdate") - - if weekdate is not None: - # we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that - year, week = int(weekdate[0]), int(weekdate[1]) - - if weekdate[2] is not None: - day = int(weekdate[2]) - else: - # day not given, default to 1 - day = 1 - - dt = iso_to_gregorian(year, week, day) - parts["year"] = dt.year - parts["month"] = dt.month - parts["day"] = dt.day - - timestamp = parts.get("timestamp") - - if timestamp is not None: - return datetime.fromtimestamp(timestamp, tz=tz.tzutc()) - - expanded_timestamp = parts.get("expanded_timestamp") - - if expanded_timestamp is not None: - return datetime.fromtimestamp( - normalize_timestamp(expanded_timestamp), - tz=tz.tzutc(), - ) - - day_of_year = parts.get("day_of_year") - - if 
day_of_year is not None: - year = parts.get("year") - month = parts.get("month") - if year is None: - raise ParserError( - "Year component is required with the DDD and DDDD tokens." - ) - - if month is not None: - raise ParserError( - "Month component is not allowed with the DDD and DDDD tokens." - ) - - date_string = "{}-{}".format(year, day_of_year) - try: - dt = datetime.strptime(date_string, "%Y-%j") - except ValueError: - raise ParserError( - "The provided day of year '{}' is invalid.".format(day_of_year) - ) - - parts["year"] = dt.year - parts["month"] = dt.month - parts["day"] = dt.day - - day_of_week = parts.get("day_of_week") - day = parts.get("day") - - # If day is passed, ignore day of week - if day_of_week is not None and day is None: - year = parts.get("year", 1970) - month = parts.get("month", 1) - day = 1 - - # dddd => first day of week after epoch - # dddd YYYY => first day of week in specified year - # dddd MM YYYY => first day of week in specified year and month - # dddd MM => first day after epoch in specified month - next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week) - parts["year"] = next_weekday_dt.year - parts["month"] = next_weekday_dt.month - parts["day"] = next_weekday_dt.day - - am_pm = parts.get("am_pm") - hour = parts.get("hour", 0) - - if am_pm == "pm" and hour < 12: - hour += 12 - elif am_pm == "am" and hour == 12: - hour = 0 - - # Support for midnight at the end of day - if hour == 24: - if parts.get("minute", 0) != 0: - raise ParserError("Midnight at the end of day must not contain minutes") - if parts.get("second", 0) != 0: - raise ParserError("Midnight at the end of day must not contain seconds") - if parts.get("microsecond", 0) != 0: - raise ParserError( - "Midnight at the end of day must not contain microseconds" - ) - hour = 0 - day_increment = 1 - else: - day_increment = 0 - - # account for rounding up to 1000000 - microsecond = parts.get("microsecond", 0) - if microsecond == 1000000: - microsecond = 0 - 
second_increment = 1 - else: - second_increment = 0 - - increment = timedelta(days=day_increment, seconds=second_increment) - - return ( - datetime( - year=parts.get("year", 1), - month=parts.get("month", 1), - day=parts.get("day", 1), - hour=hour, - minute=parts.get("minute", 0), - second=parts.get("second", 0), - microsecond=microsecond, - tzinfo=parts.get("tzinfo"), - ) - + increment - ) - - def _parse_multiformat(self, string, formats): - - _datetime = None - - for fmt in formats: - try: - _datetime = self.parse(string, fmt) - break - except ParserMatchError: - pass - - if _datetime is None: - raise ParserError( - "Could not match input '{}' to any of the following formats: {}".format( - string, ", ".join(formats) - ) - ) - - return _datetime - - # generates a capture group of choices separated by an OR operator - @staticmethod - def _generate_choice_re(choices, flags=0): - return re.compile(r"({})".format("|".join(choices)), flags=flags) - - -class TzinfoParser(object): - _TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$") - - @classmethod - def parse(cls, tzinfo_string): - - tzinfo = None - - if tzinfo_string == "local": - tzinfo = tz.tzlocal() - - elif tzinfo_string in ["utc", "UTC", "Z"]: - tzinfo = tz.tzutc() - - else: - - iso_match = cls._TZINFO_RE.match(tzinfo_string) - - if iso_match: - sign, hours, minutes = iso_match.groups() - if minutes is None: - minutes = 0 - seconds = int(hours) * 3600 + int(minutes) * 60 - - if sign == "-": - seconds *= -1 - - tzinfo = tz.tzoffset(None, seconds) - - else: - tzinfo = tz.gettz(tzinfo_string) - - if tzinfo is None: - raise ParserError( - 'Could not parse timezone expression "{}"'.format(tzinfo_string) - ) - - return tzinfo diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py deleted file mode 100644 index acce8878df8..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import datetime -import numbers - -from dateutil.rrule import WEEKLY, rrule - -from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US - - -def next_weekday(start_date, weekday): - """Get next weekday from the specified start date. - - :param start_date: Datetime object representing the start date. - :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday). - :return: Datetime object corresponding to the next weekday after start_date. - - Usage:: - - # Get first Monday after epoch - >>> next_weekday(datetime(1970, 1, 1), 0) - 1970-01-05 00:00:00 - - # Get first Thursday after epoch - >>> next_weekday(datetime(1970, 1, 1), 3) - 1970-01-01 00:00:00 - - # Get first Sunday after epoch - >>> next_weekday(datetime(1970, 1, 1), 6) - 1970-01-04 00:00:00 - """ - if weekday < 0 or weekday > 6: - raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).") - return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0] - - -def total_seconds(td): - """Get total seconds for timedelta.""" - return td.total_seconds() - - -def is_timestamp(value): - """Check if value is a valid timestamp.""" - if isinstance(value, bool): - return False - if not ( - isinstance(value, numbers.Integral) - or isinstance(value, float) - or isinstance(value, str) - ): - return False - try: - float(value) - return True - except ValueError: - return False - - -def normalize_timestamp(timestamp): - """Normalize millisecond and microsecond timestamps into normal timestamps.""" - if timestamp > MAX_TIMESTAMP: - if timestamp < MAX_TIMESTAMP_MS: - timestamp /= 1e3 - elif timestamp < MAX_TIMESTAMP_US: - timestamp /= 1e6 - else: - raise ValueError( - "The specified timestamp '{}' is too large.".format(timestamp) - ) - return timestamp - - -# Credit to 
https://stackoverflow.com/a/1700069 -def iso_to_gregorian(iso_year, iso_week, iso_day): - """Converts an ISO week date tuple into a datetime object.""" - - if not 1 <= iso_week <= 53: - raise ValueError("ISO Calendar week value must be between 1-53.") - - if not 1 <= iso_day <= 7: - raise ValueError("ISO Calendar day value must be between 1-7") - - # The first week of the year always contains 4 Jan. - fourth_jan = datetime.date(iso_year, 1, 4) - delta = datetime.timedelta(fourth_jan.isoweekday() - 1) - year_start = fourth_jan - delta - gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) - - return gregorian - - -def validate_bounds(bounds): - if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]": - raise ValueError( - 'Invalid bounds. Please select between "()", "(]", "[)", or "[]".' - ) - - -# Python 2.7 / 3.0+ definitions for isstr function. - -try: # pragma: no cover - basestring - - def isstr(s): - return isinstance(s, basestring) # noqa: F821 - - -except NameError: # pragma: no cover - - def isstr(s): - return isinstance(s, str) - - -__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile deleted file mode 100644 index d4bb2cbb9ed..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". 
-help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py deleted file mode 100644 index aaf3c508223..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- - -# -- Path setup -------------------------------------------------------------- - -import io -import os -import sys - -sys.path.insert(0, os.path.abspath("..")) - -about = {} -with io.open("../arrow/_version.py", "r", encoding="utf-8") as f: - exec(f.read(), about) - -# -- Project information ----------------------------------------------------- - -project = u"Arrow 🏹" -copyright = "2020, Chris Smith" -author = "Chris Smith" - -release = about["__version__"] - -# -- General configuration --------------------------------------------------- - -extensions = ["sphinx.ext.autodoc"] - -templates_path = [] - -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -master_doc = "index" -source_suffix = ".rst" -pygments_style = "sphinx" - -language = None - -# -- Options for HTML output ------------------------------------------------- - -html_theme = "alabaster" -html_theme_path = [] -html_static_path = [] - -html_show_sourcelink = False -html_show_sphinx = False -html_show_copyright = True - -# https://alabaster.readthedocs.io/en/latest/customization.html -html_theme_options = { - "description": "Arrow is a sensible and human-friendly approach to dates, times and timestamps.", - "github_user": "arrow-py", - "github_repo": "arrow", - "github_banner": True, - "show_related": False, - 
"show_powered_by": False, - "github_button": True, - "github_type": "star", - "github_count": "true", # must be a string -} - -html_sidebars = { - "**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"] -} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst deleted file mode 100644 index e2830b04f30..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst +++ /dev/null @@ -1,566 +0,0 @@ -Arrow: Better dates & times for Python -====================================== - -Release v\ |release| (`Installation`_) (`Changelog `_) - -.. include:: ../README.rst - :start-after: start-inclusion-marker-do-not-remove - :end-before: end-inclusion-marker-do-not-remove - -User's Guide ------------- - -Creation -~~~~~~~~ - -Get 'now' easily: - -.. code-block:: python - - >>> arrow.utcnow() - - - >>> arrow.now() - - - >>> arrow.now('US/Pacific') - - -Create from timestamps (:code:`int` or :code:`float`): - -.. code-block:: python - - >>> arrow.get(1367900664) - - - >>> arrow.get(1367900664.152325) - - -Use a naive or timezone-aware datetime, or flexibly specify a timezone: - -.. code-block:: python - - >>> arrow.get(datetime.utcnow()) - - - >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') - - - >>> from dateutil import tz - >>> arrow.get(datetime(2013, 5, 5), tz.gettz('US/Pacific')) - - - >>> arrow.get(datetime.now(tz.gettz('US/Pacific'))) - - -Parse from a string: - -.. code-block:: python - - >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss') - - -Search a date in a string: - -.. code-block:: python - - >>> arrow.get('June was born in May 1980', 'MMMM YYYY') - - -Some ISO 8601 compliant strings are recognized and parsed without a format string: - - >>> arrow.get('2013-09-30T15:34:00.000-07:00') - - -Arrow objects can be instantiated directly too, with the same arguments as a datetime: - -.. 
code-block:: python - - >>> arrow.get(2013, 5, 5) - - - >>> arrow.Arrow(2013, 5, 5) - - -Properties -~~~~~~~~~~ - -Get a datetime or timestamp representation: - -.. code-block:: python - - >>> a = arrow.utcnow() - >>> a.datetime - datetime.datetime(2013, 5, 7, 4, 38, 15, 447644, tzinfo=tzutc()) - - >>> a.timestamp - 1367901495 - -Get a naive datetime, and tzinfo: - -.. code-block:: python - - >>> a.naive - datetime.datetime(2013, 5, 7, 4, 38, 15, 447644) - - >>> a.tzinfo - tzutc() - -Get any datetime value: - -.. code-block:: python - - >>> a.year - 2013 - -Call datetime functions that return properties: - -.. code-block:: python - - >>> a.date() - datetime.date(2013, 5, 7) - - >>> a.time() - datetime.time(4, 38, 15, 447644) - -Replace & Shift -~~~~~~~~~~~~~~~ - -Get a new :class:`Arrow ` object, with altered attributes, just as you would with a datetime: - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw - - - >>> arw.replace(hour=4, minute=40) - - -Or, get one with attributes shifted forward or backward: - -.. code-block:: python - - >>> arw.shift(weeks=+3) - - -Even replace the timezone without altering other attributes: - -.. code-block:: python - - >>> arw.replace(tzinfo='US/Pacific') - - -Move between the earlier and later moments of an ambiguous time: - -.. code-block:: python - - >>> paris_transition = arrow.Arrow(2019, 10, 27, 2, tzinfo="Europe/Paris", fold=0) - >>> paris_transition - - >>> paris_transition.ambiguous - True - >>> paris_transition.replace(fold=1) - - -Format -~~~~~~ - -.. code-block:: python - - >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-07 05:23:16 -00:00' - -Convert -~~~~~~~ - -Convert from UTC to other timezones by name or tzinfo: - -.. code-block:: python - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc.to('US/Pacific') - - - >>> utc.to(tz.gettz('US/Pacific')) - - -Or using shorthand: - -.. 
code-block:: python - - >>> utc.to('local') - - - >>> utc.to('local').to('utc') - - - -Humanize -~~~~~~~~ - -Humanize relative to now: - -.. code-block:: python - - >>> past = arrow.utcnow().shift(hours=-1) - >>> past.humanize() - 'an hour ago' - -Or another Arrow, or datetime: - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(hours=2) - >>> future.humanize(present) - 'in 2 hours' - -Indicate time as relative or include only the distance - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(hours=2) - >>> future.humanize(present) - 'in 2 hours' - >>> future.humanize(present, only_distance=True) - '2 hours' - - -Indicate a specific time granularity (or multiple): - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(minutes=66) - >>> future.humanize(present, granularity="minute") - 'in 66 minutes' - >>> future.humanize(present, granularity=["hour", "minute"]) - 'in an hour and 6 minutes' - >>> present.humanize(future, granularity=["hour", "minute"]) - 'an hour and 6 minutes ago' - >>> future.humanize(present, only_distance=True, granularity=["hour", "minute"]) - 'an hour and 6 minutes' - -Support for a growing number of locales (see ``locales.py`` for supported languages): - -.. code-block:: python - - - >>> future = arrow.utcnow().shift(hours=1) - >>> future.humanize(a, locale='ru') - 'через 2 час(а,ов)' - - -Ranges & Spans -~~~~~~~~~~~~~~ - -Get the time span of any unit: - -.. code-block:: python - - >>> arrow.utcnow().span('hour') - (, ) - -Or just get the floor and ceiling: - -.. code-block:: python - - >>> arrow.utcnow().floor('hour') - - - >>> arrow.utcnow().ceil('hour') - - -You can also get a range of time spans: - -.. code-block:: python - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.span_range('hour', start, end): - ... print r - ... 
- (, ) - (, ) - (, ) - (, ) - (, ) - -Or just iterate over a range of time: - -.. code-block:: python - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print repr(r) - ... - - - - - - -.. toctree:: - :maxdepth: 2 - -Factories -~~~~~~~~~ - -Use factories to harness Arrow's module API for a custom Arrow-derived type. First, derive your type: - -.. code-block:: python - - >>> class CustomArrow(arrow.Arrow): - ... - ... def days_till_xmas(self): - ... - ... xmas = arrow.Arrow(self.year, 12, 25) - ... - ... if self > xmas: - ... xmas = xmas.shift(years=1) - ... - ... return (xmas - self).days - - -Then get and use a factory for it: - -.. code-block:: python - - >>> factory = arrow.ArrowFactory(CustomArrow) - >>> custom = factory.utcnow() - >>> custom - >>> - - >>> custom.days_till_xmas() - >>> 211 - -Supported Tokens -~~~~~~~~~~~~~~~~ - -Use the following tokens for parsing and formatting. Note that they are **not** the same as the tokens for `strptime `_: - -+--------------------------------+--------------+-------------------------------------------+ -| |Token |Output | -+================================+==============+===========================================+ -|**Year** |YYYY |2000, 2001, 2002 ... 2012, 2013 | -+--------------------------------+--------------+-------------------------------------------+ -| |YY |00, 01, 02 ... 12, 13 | -+--------------------------------+--------------+-------------------------------------------+ -|**Month** |MMMM |January, February, March ... [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |MMM |Jan, Feb, Mar ... [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |MM |01, 02, 03 ... 11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -| |M |1, 2, 3 ... 
11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Year** |DDDD |001, 002, 003 ... 364, 365 | -+--------------------------------+--------------+-------------------------------------------+ -| |DDD |1, 2, 3 ... 364, 365 | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Month** |DD |01, 02, 03 ... 30, 31 | -+--------------------------------+--------------+-------------------------------------------+ -| |D |1, 2, 3 ... 30, 31 | -+--------------------------------+--------------+-------------------------------------------+ -| |Do |1st, 2nd, 3rd ... 30th, 31st | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Week** |dddd |Monday, Tuesday, Wednesday ... [#t2]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |ddd |Mon, Tue, Wed ... [#t2]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |d |1, 2, 3 ... 6, 7 | -+--------------------------------+--------------+-------------------------------------------+ -|**ISO week date** |W |2011-W05-4, 2019-W17 | -+--------------------------------+--------------+-------------------------------------------+ -|**Hour** |HH |00, 01, 02 ... 23, 24 | -+--------------------------------+--------------+-------------------------------------------+ -| |H |0, 1, 2 ... 23, 24 | -+--------------------------------+--------------+-------------------------------------------+ -| |hh |01, 02, 03 ... 11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -| |h |1, 2, 3 ... 
11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -|**AM / PM** |A |AM, PM, am, pm [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |a |am, pm [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**Minute** |mm |00, 01, 02 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -| |m |0, 1, 2 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -|**Second** |ss |00, 01, 02 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -| |s |0, 1, 2 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -|**Sub-second** |S... |0, 02, 003, 000006, 123123123123... [#t3]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**Timezone** |ZZZ |Asia/Baku, Europe/Warsaw, GMT ... [#t4]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |ZZ |-07:00, -06:00 ... +06:00, +07:00, +08, Z | -+--------------------------------+--------------+-------------------------------------------+ -| |Z |-0700, -0600 ... +0600, +0700, +08, Z | -+--------------------------------+--------------+-------------------------------------------+ -|**Seconds Timestamp** |X |1381685817, 1381685817.915482 ... [#t5]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**ms or µs Timestamp** |x |1569980330813, 1569980330813221 | -+--------------------------------+--------------+-------------------------------------------+ - -.. rubric:: Footnotes - -.. [#t1] localization support for parsing and formatting -.. [#t2] localization support only for formatting -.. 
[#t3] the result is truncated to microseconds, with `half-to-even rounding `_. -.. [#t4] timezone names from `tz database `_ provided via dateutil package, note that abbreviations such as MST, PDT, BRST are unlikely to parse due to ambiguity. Use the full IANA zone name instead (Asia/Shanghai, Europe/London, America/Chicago etc). -.. [#t5] this token cannot be used for parsing timestamps out of natural language strings due to compatibility reasons - -Built-in Formats -++++++++++++++++ - -There are several formatting standards that are provided as built-in tokens. - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw.format(arrow.FORMAT_ATOM) - '2020-05-27 10:30:35+00:00' - >>> arw.format(arrow.FORMAT_COOKIE) - 'Wednesday, 27-May-2020 10:30:35 UTC' - >>> arw.format(arrow.FORMAT_RSS) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC822) - 'Wed, 27 May 20 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC850) - 'Wednesday, 27-May-20 10:30:35 UTC' - >>> arw.format(arrow.FORMAT_RFC1036) - 'Wed, 27 May 20 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC1123) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC2822) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC3339) - '2020-05-27 10:30:35+00:00' - >>> arw.format(arrow.FORMAT_W3C) - '2020-05-27 10:30:35+00:00' - -Escaping Formats -~~~~~~~~~~~~~~~~ - -Tokens, phrases, and regular expressions in a format string can be escaped when parsing and formatting by enclosing them within square brackets. - -Tokens & Phrases -++++++++++++++++ - -Any `token `_ or phrase can be escaped as follows: - -.. 
code-block:: python - - >>> fmt = "YYYY-MM-DD h [h] m" - >>> arw = arrow.get("2018-03-09 8 h 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 h 40' - - >>> fmt = "YYYY-MM-DD h [hello] m" - >>> arw = arrow.get("2018-03-09 8 hello 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 hello 40' - - >>> fmt = "YYYY-MM-DD h [hello world] m" - >>> arw = arrow.get("2018-03-09 8 hello world 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 hello world 40' - -This can be useful for parsing dates in different locales such as French, in which it is common to format time strings as "8 h 40" rather than "8:40". - -Regular Expressions -+++++++++++++++++++ - -You can also escape regular expressions by enclosing them within square brackets. In the following example, we are using the regular expression :code:`\s+` to match any number of whitespace characters that separate the tokens. This is useful if you do not know the number of spaces between tokens ahead of time (e.g. in log files). - -.. code-block:: python - - >>> fmt = r"ddd[\s+]MMM[\s+]DD[\s+]HH:mm:ss[\s+]YYYY" - >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) - - - >>> arrow.get("Mon \tSep 08 16:41:45 2014", fmt) - - - >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) - - -Punctuation -~~~~~~~~~~~ - -Date and time formats may be fenced on either side by one punctuation character from the following list: ``, . ; : ? ! " \` ' [ ] { } ( ) < >`` - -.. 
code-block:: python - - >>> arrow.get("Cool date: 2019-10-31T09:12:45.123456+04:30.", "YYYY-MM-DDTHH:mm:ss.SZZ") - - - >>> arrow.get("Tomorrow (2019-10-31) is Halloween!", "YYYY-MM-DD") - - - >>> arrow.get("Halloween is on 2019.10.31.", "YYYY.MM.DD") - - - >>> arrow.get("It's Halloween tomorrow (2019-10-31)!", "YYYY-MM-DD") - # Raises exception because there are multiple punctuation marks following the date - -Redundant Whitespace -~~~~~~~~~~~~~~~~~~~~ - -Redundant whitespace characters (spaces, tabs, and newlines) can be normalized automatically by passing in the ``normalize_whitespace`` flag to ``arrow.get``: - -.. code-block:: python - - >>> arrow.get('\t \n 2013-05-05T12:30:45.123456 \t \n', normalize_whitespace=True) - - - >>> arrow.get('2013-05-05 T \n 12:30:45\t123456', 'YYYY-MM-DD T HH:mm:ss S', normalize_whitespace=True) - - -API Guide ---------- - -arrow.arrow -~~~~~~~~~~~ - -.. automodule:: arrow.arrow - :members: - -arrow.factory -~~~~~~~~~~~~~ - -.. automodule:: arrow.factory - :members: - -arrow.api -~~~~~~~~~ - -.. automodule:: arrow.api - :members: - -arrow.locale -~~~~~~~~~~~~ - -.. automodule:: arrow.locales - :members: - :undoc-members: - -Release History ---------------- - -.. toctree:: - :maxdepth: 2 - - releases diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat deleted file mode 100644 index 922152e96a0..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst deleted file mode 100644 index 22e1e59c8c7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst +++ /dev/null @@ -1,3 +0,0 @@ -.. _releases: - -.. include:: ../CHANGELOG.rst diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt deleted file mode 100644 index df565d83845..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -backports.functools_lru_cache==1.6.1; python_version == "2.7" -dateparser==0.7.* -pre-commit==1.21.*; python_version <= "3.5" -pre-commit==2.6.*; python_version >= "3.6" -pytest==4.6.*; python_version == "2.7" -pytest==6.0.*; python_version >= "3.5" -pytest-cov==2.10.* -pytest-mock==2.0.*; python_version == "2.7" -pytest-mock==3.2.*; python_version >= "3.5" -python-dateutil==2.8.* -pytz==2019.* -simplejson==3.17.* -sphinx==1.8.*; python_version == "2.7" -sphinx==3.2.*; python_version >= "3.5" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg deleted file mode 100644 index 2a9acf13daa..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py deleted file mode 100644 index dc4f0e77d54..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -import io - -from setuptools import setup - -with io.open("README.rst", "r", encoding="utf-8") as f: - readme = f.read() - -about = {} -with io.open("arrow/_version.py", "r", encoding="utf-8") as f: - exec(f.read(), about) - -setup( - name="arrow", - version=about["__version__"], - description="Better dates & times for Python", - long_description=readme, - long_description_content_type="text/x-rst", - url="https://arrow.readthedocs.io", - author="Chris Smith", - author_email="crsmithdev@gmail.com", - license="Apache 2.0", - packages=["arrow"], - zip_safe=False, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", - install_requires=[ - "python-dateutil>=2.7.0", - "backports.functools_lru_cache>=1.2.1;python_version=='2.7'", - ], - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Topic :: Software Development :: Libraries :: Python Modules", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - ], - keywords="arrow date time datetime timestamp timezone humanize", - project_urls={ - "Repository": "https://github.com/arrow-py/arrow", - "Bug Reports": "https://github.com/arrow-py/arrow/issues", - 
"Documentation": "https://arrow.readthedocs.io", - }, -) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py deleted file mode 100644 index 5bc8a4af2e8..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import datetime - -import pytest -from dateutil import tz as dateutil_tz - -from arrow import arrow, factory, formatter, locales, parser - - -@pytest.fixture(scope="class") -def time_utcnow(request): - request.cls.arrow = arrow.Arrow.utcnow() - - -@pytest.fixture(scope="class") -def time_2013_01_01(request): - request.cls.now = arrow.Arrow.utcnow() - request.cls.arrow = arrow.Arrow(2013, 1, 1) - request.cls.datetime = datetime(2013, 1, 1) - - -@pytest.fixture(scope="class") -def time_2013_02_03(request): - request.cls.arrow = arrow.Arrow(2013, 2, 3, 12, 30, 45, 1) - - -@pytest.fixture(scope="class") -def time_2013_02_15(request): - request.cls.datetime = datetime(2013, 2, 15, 3, 41, 22, 8923) - request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) - - -@pytest.fixture(scope="class") -def time_1975_12_25(request): - request.cls.datetime = datetime( - 1975, 12, 25, 14, 15, 16, tzinfo=dateutil_tz.gettz("America/New_York") - ) - request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) - - -@pytest.fixture(scope="class") -def arrow_formatter(request): - request.cls.formatter = formatter.DateTimeFormatter() - - -@pytest.fixture(scope="class") -def arrow_factory(request): - request.cls.factory = factory.ArrowFactory() - - -@pytest.fixture(scope="class") -def lang_locales(request): - 
request.cls.locales = locales._locales - - -@pytest.fixture(scope="class") -def lang_locale(request): - # As locale test classes are prefixed with Test, we are dynamically getting the locale by the test class name. - # TestEnglishLocale -> EnglishLocale - name = request.cls.__name__[4:] - request.cls.locale = locales.get_locale_by_class_name(name) - - -@pytest.fixture(scope="class") -def dt_parser(request): - request.cls.parser = parser.DateTimeParser() - - -@pytest.fixture(scope="class") -def dt_parser_regex(request): - request.cls.format_regex = parser.DateTimeParser._FORMAT_RE - - -@pytest.fixture(scope="class") -def tzinfo_parser(request): - request.cls.parser = parser.TzinfoParser() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py deleted file mode 100644 index 9b19a27cd97..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -import arrow - - -class TestModule: - def test_get(self, mocker): - mocker.patch("arrow.api._factory.get", return_value="result") - - assert arrow.api.get() == "result" - - def test_utcnow(self, mocker): - mocker.patch("arrow.api._factory.utcnow", return_value="utcnow") - - assert arrow.api.utcnow() == "utcnow" - - def test_now(self, mocker): - mocker.patch("arrow.api._factory.now", tz="tz", return_value="now") - - assert arrow.api.now("tz") == "now" - - def test_factory(self): - class MockCustomArrowClass(arrow.Arrow): - pass - - result = arrow.api.factory(MockCustomArrowClass) - - assert isinstance(result, arrow.factory.ArrowFactory) - assert isinstance(result.utcnow(), MockCustomArrowClass) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py deleted file mode 100644 index 
b0bd20a5e3a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py +++ /dev/null @@ -1,2150 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import calendar -import pickle -import sys -import time -from datetime import date, datetime, timedelta - -import dateutil -import pytest -import pytz -import simplejson as json -from dateutil import tz -from dateutil.relativedelta import FR, MO, SA, SU, TH, TU, WE - -from arrow import arrow - -from .utils import assert_datetime_equality - - -class TestTestArrowInit: - def test_init_bad_input(self): - - with pytest.raises(TypeError): - arrow.Arrow(2013) - - with pytest.raises(TypeError): - arrow.Arrow(2013, 2) - - with pytest.raises(ValueError): - arrow.Arrow(2013, 2, 2, 12, 30, 45, 9999999) - - def test_init(self): - - result = arrow.Arrow(2013, 2, 2) - self.expected = datetime(2013, 2, 2, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12) - self.expected = datetime(2013, 2, 2, 12, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30) - self.expected = datetime(2013, 2, 2, 12, 30, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30, 45) - self.expected = datetime(2013, 2, 2, 12, 30, 45, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30, 45, 999999) - self.expected = datetime(2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - self.expected = datetime( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == self.expected - - # regression tests for issue #626 - def test_init_pytz_timezone(self): - - result = arrow.Arrow( - 2013, 2, 2, 12, 30, 45, 
999999, tzinfo=pytz.timezone("Europe/Paris") - ) - self.expected = datetime( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == self.expected - assert_datetime_equality(result._datetime, self.expected, 1) - - def test_init_with_fold(self): - before = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") - after = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1) - - assert hasattr(before, "fold") - assert hasattr(after, "fold") - - # PEP-495 requires the comparisons below to be true - assert before == after - assert before.utcoffset() != after.utcoffset() - - -class TestTestArrowFactory: - def test_now(self): - - result = arrow.Arrow.now() - - assert_datetime_equality( - result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) - ) - - def test_utcnow(self): - - result = arrow.Arrow.utcnow() - - assert_datetime_equality( - result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - assert result.fold == 0 - - def test_fromtimestamp(self): - - timestamp = time.time() - - result = arrow.Arrow.fromtimestamp(timestamp) - assert_datetime_equality( - result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) - ) - - result = arrow.Arrow.fromtimestamp(timestamp, tzinfo=tz.gettz("Europe/Paris")) - assert_datetime_equality( - result._datetime, - datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), - ) - - result = arrow.Arrow.fromtimestamp(timestamp, tzinfo="Europe/Paris") - assert_datetime_equality( - result._datetime, - datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), - ) - - with pytest.raises(ValueError): - arrow.Arrow.fromtimestamp("invalid timestamp") - - def test_utcfromtimestamp(self): - - timestamp = time.time() - - result = arrow.Arrow.utcfromtimestamp(timestamp) - assert_datetime_equality( - result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - with pytest.raises(ValueError): - arrow.Arrow.utcfromtimestamp("invalid timestamp") - - def 
test_fromdatetime(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1) - - result = arrow.Arrow.fromdatetime(dt) - - assert result._datetime == dt.replace(tzinfo=tz.tzutc()) - - def test_fromdatetime_dt_tzinfo(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1, tzinfo=tz.gettz("US/Pacific")) - - result = arrow.Arrow.fromdatetime(dt) - - assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_fromdatetime_tzinfo_arg(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1) - - result = arrow.Arrow.fromdatetime(dt, tz.gettz("US/Pacific")) - - assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_fromdate(self): - - dt = date(2013, 2, 3) - - result = arrow.Arrow.fromdate(dt, tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 2, 3, tzinfo=tz.gettz("US/Pacific")) - - def test_strptime(self): - - formatted = datetime(2013, 2, 3, 12, 30, 45).strftime("%Y-%m-%d %H:%M:%S") - - result = arrow.Arrow.strptime(formatted, "%Y-%m-%d %H:%M:%S") - assert result._datetime == datetime(2013, 2, 3, 12, 30, 45, tzinfo=tz.tzutc()) - - result = arrow.Arrow.strptime( - formatted, "%Y-%m-%d %H:%M:%S", tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == datetime( - 2013, 2, 3, 12, 30, 45, tzinfo=tz.gettz("Europe/Paris") - ) - - -@pytest.mark.usefixtures("time_2013_02_03") -class TestTestArrowRepresentation: - def test_repr(self): - - result = self.arrow.__repr__() - - assert result == "".format(self.arrow._datetime.isoformat()) - - def test_str(self): - - result = self.arrow.__str__() - - assert result == self.arrow._datetime.isoformat() - - def test_hash(self): - - result = self.arrow.__hash__() - - assert result == self.arrow._datetime.__hash__() - - def test_format(self): - - result = "{:YYYY-MM-DD}".format(self.arrow) - - assert result == "2013-02-03" - - def test_bare_format(self): - - result = self.arrow.format() - - assert result == "2013-02-03 12:30:45+00:00" - - def test_format_no_format_string(self): - - 
result = "{}".format(self.arrow) - - assert result == str(self.arrow) - - def test_clone(self): - - result = self.arrow.clone() - - assert result is not self.arrow - assert result._datetime == self.arrow._datetime - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowAttribute: - def test_getattr_base(self): - - with pytest.raises(AttributeError): - self.arrow.prop - - def test_getattr_week(self): - - assert self.arrow.week == 1 - - def test_getattr_quarter(self): - # start dates - q1 = arrow.Arrow(2013, 1, 1) - q2 = arrow.Arrow(2013, 4, 1) - q3 = arrow.Arrow(2013, 8, 1) - q4 = arrow.Arrow(2013, 10, 1) - assert q1.quarter == 1 - assert q2.quarter == 2 - assert q3.quarter == 3 - assert q4.quarter == 4 - - # end dates - q1 = arrow.Arrow(2013, 3, 31) - q2 = arrow.Arrow(2013, 6, 30) - q3 = arrow.Arrow(2013, 9, 30) - q4 = arrow.Arrow(2013, 12, 31) - assert q1.quarter == 1 - assert q2.quarter == 2 - assert q3.quarter == 3 - assert q4.quarter == 4 - - def test_getattr_dt_value(self): - - assert self.arrow.year == 2013 - - def test_tzinfo(self): - - self.arrow.tzinfo = tz.gettz("PST") - assert self.arrow.tzinfo == tz.gettz("PST") - - def test_naive(self): - - assert self.arrow.naive == self.arrow._datetime.replace(tzinfo=None) - - def test_timestamp(self): - - assert self.arrow.timestamp == calendar.timegm( - self.arrow._datetime.utctimetuple() - ) - - with pytest.warns(DeprecationWarning): - self.arrow.timestamp - - def test_int_timestamp(self): - - assert self.arrow.int_timestamp == calendar.timegm( - self.arrow._datetime.utctimetuple() - ) - - def test_float_timestamp(self): - - result = self.arrow.float_timestamp - self.arrow.timestamp - - assert result == self.arrow.microsecond - - def test_getattr_fold(self): - - # UTC is always unambiguous - assert self.now.fold == 0 - - ambiguous_dt = arrow.Arrow( - 2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1 - ) - assert ambiguous_dt.fold == 1 - - with pytest.raises(AttributeError): - ambiguous_dt.fold = 0 - - 
def test_getattr_ambiguous(self): - - assert not self.now.ambiguous - - ambiguous_dt = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") - - assert ambiguous_dt.ambiguous - - def test_getattr_imaginary(self): - - assert not self.now.imaginary - - imaginary_dt = arrow.Arrow(2013, 3, 31, 2, 30, tzinfo="Europe/Paris") - - assert imaginary_dt.imaginary - - -@pytest.mark.usefixtures("time_utcnow") -class TestArrowComparison: - def test_eq(self): - - assert self.arrow == self.arrow - assert self.arrow == self.arrow.datetime - assert not (self.arrow == "abc") - - def test_ne(self): - - assert not (self.arrow != self.arrow) - assert not (self.arrow != self.arrow.datetime) - assert self.arrow != "abc" - - def test_gt(self): - - arrow_cmp = self.arrow.shift(minutes=1) - - assert not (self.arrow > self.arrow) - assert not (self.arrow > self.arrow.datetime) - - with pytest.raises(TypeError): - self.arrow > "abc" - - assert self.arrow < arrow_cmp - assert self.arrow < arrow_cmp.datetime - - def test_ge(self): - - with pytest.raises(TypeError): - self.arrow >= "abc" - - assert self.arrow >= self.arrow - assert self.arrow >= self.arrow.datetime - - def test_lt(self): - - arrow_cmp = self.arrow.shift(minutes=1) - - assert not (self.arrow < self.arrow) - assert not (self.arrow < self.arrow.datetime) - - with pytest.raises(TypeError): - self.arrow < "abc" - - assert self.arrow < arrow_cmp - assert self.arrow < arrow_cmp.datetime - - def test_le(self): - - with pytest.raises(TypeError): - self.arrow <= "abc" - - assert self.arrow <= self.arrow - assert self.arrow <= self.arrow.datetime - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowMath: - def test_add_timedelta(self): - - result = self.arrow.__add__(timedelta(days=1)) - - assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) - - def test_add_other(self): - - with pytest.raises(TypeError): - self.arrow + 1 - - def test_radd(self): - - result = self.arrow.__radd__(timedelta(days=1)) - - assert 
result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) - - def test_sub_timedelta(self): - - result = self.arrow.__sub__(timedelta(days=1)) - - assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc()) - - def test_sub_datetime(self): - - result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=11) - - def test_sub_arrow(self): - - result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=11) - - def test_sub_other(self): - - with pytest.raises(TypeError): - self.arrow - object() - - def test_rsub_datetime(self): - - result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=-11) - - def test_rsub_other(self): - - with pytest.raises(TypeError): - timedelta(days=1) - self.arrow - - -@pytest.mark.usefixtures("time_utcnow") -class TestArrowDatetimeInterface: - def test_date(self): - - result = self.arrow.date() - - assert result == self.arrow._datetime.date() - - def test_time(self): - - result = self.arrow.time() - - assert result == self.arrow._datetime.time() - - def test_timetz(self): - - result = self.arrow.timetz() - - assert result == self.arrow._datetime.timetz() - - def test_astimezone(self): - - other_tz = tz.gettz("US/Pacific") - - result = self.arrow.astimezone(other_tz) - - assert result == self.arrow._datetime.astimezone(other_tz) - - def test_utcoffset(self): - - result = self.arrow.utcoffset() - - assert result == self.arrow._datetime.utcoffset() - - def test_dst(self): - - result = self.arrow.dst() - - assert result == self.arrow._datetime.dst() - - def test_timetuple(self): - - result = self.arrow.timetuple() - - assert result == self.arrow._datetime.timetuple() - - def test_utctimetuple(self): - - result = self.arrow.utctimetuple() - - assert result == self.arrow._datetime.utctimetuple() - - def test_toordinal(self): - - result = self.arrow.toordinal() - - assert result == 
self.arrow._datetime.toordinal() - - def test_weekday(self): - - result = self.arrow.weekday() - - assert result == self.arrow._datetime.weekday() - - def test_isoweekday(self): - - result = self.arrow.isoweekday() - - assert result == self.arrow._datetime.isoweekday() - - def test_isocalendar(self): - - result = self.arrow.isocalendar() - - assert result == self.arrow._datetime.isocalendar() - - def test_isoformat(self): - - result = self.arrow.isoformat() - - assert result == self.arrow._datetime.isoformat() - - def test_simplejson(self): - - result = json.dumps({"v": self.arrow.for_json()}, for_json=True) - - assert json.loads(result)["v"] == self.arrow._datetime.isoformat() - - def test_ctime(self): - - result = self.arrow.ctime() - - assert result == self.arrow._datetime.ctime() - - def test_strftime(self): - - result = self.arrow.strftime("%Y") - - assert result == self.arrow._datetime.strftime("%Y") - - -class TestArrowFalsePositiveDst: - """These tests relate to issues #376 and #551. - The key points in both issues are that arrow will assign a UTC timezone if none is provided and - .to() will change other attributes to be correct whereas .replace() only changes the specified attribute. 
- - Issue 376 - >>> arrow.get('2016-11-06').to('America/New_York').ceil('day') - < Arrow [2016-11-05T23:59:59.999999-04:00] > - - Issue 551 - >>> just_before = arrow.get('2018-11-04T01:59:59.999999') - >>> just_before - 2018-11-04T01:59:59.999999+00:00 - >>> just_after = just_before.shift(microseconds=1) - >>> just_after - 2018-11-04T02:00:00+00:00 - >>> just_before_eastern = just_before.replace(tzinfo='US/Eastern') - >>> just_before_eastern - 2018-11-04T01:59:59.999999-04:00 - >>> just_after_eastern = just_after.replace(tzinfo='US/Eastern') - >>> just_after_eastern - 2018-11-04T02:00:00-05:00 - """ - - def test_dst(self): - self.before_1 = arrow.Arrow( - 2016, 11, 6, 3, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_2 = arrow.Arrow(2016, 11, 6, tzinfo=tz.gettz("America/New_York")) - self.after_1 = arrow.Arrow(2016, 11, 6, 4, tzinfo=tz.gettz("America/New_York")) - self.after_2 = arrow.Arrow( - 2016, 11, 6, 23, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_3 = arrow.Arrow( - 2018, 11, 4, 3, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_4 = arrow.Arrow(2018, 11, 4, tzinfo=tz.gettz("America/New_York")) - self.after_3 = arrow.Arrow(2018, 11, 4, 4, tzinfo=tz.gettz("America/New_York")) - self.after_4 = arrow.Arrow( - 2018, 11, 4, 23, 59, tzinfo=tz.gettz("America/New_York") - ) - assert self.before_1.day == self.before_2.day - assert self.after_1.day == self.after_2.day - assert self.before_3.day == self.before_4.day - assert self.after_3.day == self.after_4.day - - -class TestArrowConversion: - def test_to(self): - - dt_from = datetime.now() - arrow_from = arrow.Arrow.fromdatetime(dt_from, tz.gettz("US/Pacific")) - - self.expected = dt_from.replace(tzinfo=tz.gettz("US/Pacific")).astimezone( - tz.tzutc() - ) - - assert arrow_from.to("UTC").datetime == self.expected - assert arrow_from.to(tz.tzutc()).datetime == self.expected - - # issue #368 - def test_to_pacific_then_utc(self): - result = arrow.Arrow(2018, 11, 4, 1, 
tzinfo="-08:00").to("US/Pacific").to("UTC") - assert result == arrow.Arrow(2018, 11, 4, 9) - - # issue #368 - def test_to_amsterdam_then_utc(self): - result = arrow.Arrow(2016, 10, 30).to("Europe/Amsterdam") - assert result.utcoffset() == timedelta(seconds=7200) - - # regression test for #690 - def test_to_israel_same_offset(self): - - result = arrow.Arrow(2019, 10, 27, 2, 21, 1, tzinfo="+03:00").to("Israel") - expected = arrow.Arrow(2019, 10, 27, 1, 21, 1, tzinfo="Israel") - - assert result == expected - assert result.utcoffset() != expected.utcoffset() - - # issue 315 - def test_anchorage_dst(self): - before = arrow.Arrow(2016, 3, 13, 1, 59, tzinfo="America/Anchorage") - after = arrow.Arrow(2016, 3, 13, 2, 1, tzinfo="America/Anchorage") - - assert before.utcoffset() != after.utcoffset() - - # issue 476 - def test_chicago_fall(self): - - result = arrow.Arrow(2017, 11, 5, 2, 1, tzinfo="-05:00").to("America/Chicago") - expected = arrow.Arrow(2017, 11, 5, 1, 1, tzinfo="America/Chicago") - - assert result == expected - assert result.utcoffset() != expected.utcoffset() - - def test_toronto_gap(self): - - before = arrow.Arrow(2011, 3, 13, 6, 30, tzinfo="UTC").to("America/Toronto") - after = arrow.Arrow(2011, 3, 13, 7, 30, tzinfo="UTC").to("America/Toronto") - - assert before.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 1, 30) - assert after.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 3, 30) - - assert before.utcoffset() != after.utcoffset() - - def test_sydney_gap(self): - - before = arrow.Arrow(2012, 10, 6, 15, 30, tzinfo="UTC").to("Australia/Sydney") - after = arrow.Arrow(2012, 10, 6, 16, 30, tzinfo="UTC").to("Australia/Sydney") - - assert before.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 1, 30) - assert after.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 3, 30) - - assert before.utcoffset() != after.utcoffset() - - -class TestArrowPickling: - def test_pickle_and_unpickle(self): - - dt = arrow.Arrow.utcnow() - - pickled = 
pickle.dumps(dt) - - unpickled = pickle.loads(pickled) - - assert unpickled == dt - - -class TestArrowReplace: - def test_not_attr(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(abc=1) - - def test_replace(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.replace(year=2012) == arrow.Arrow(2012, 5, 5, 12, 30, 45) - assert arw.replace(month=1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) - assert arw.replace(day=1) == arrow.Arrow(2013, 5, 1, 12, 30, 45) - assert arw.replace(hour=1) == arrow.Arrow(2013, 5, 5, 1, 30, 45) - assert arw.replace(minute=1) == arrow.Arrow(2013, 5, 5, 12, 1, 45) - assert arw.replace(second=1) == arrow.Arrow(2013, 5, 5, 12, 30, 1) - - def test_replace_tzinfo(self): - - arw = arrow.Arrow.utcnow().to("US/Eastern") - - result = arw.replace(tzinfo=tz.gettz("US/Pacific")) - - assert result == arw.datetime.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_replace_fold(self): - - before = arrow.Arrow(2017, 11, 5, 1, tzinfo="America/New_York") - after = before.replace(fold=1) - - assert before.fold == 0 - assert after.fold == 1 - assert before == after - assert before.utcoffset() != after.utcoffset() - - def test_replace_fold_and_other(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.replace(fold=1, minute=50) == arrow.Arrow(2013, 5, 5, 12, 50, 45) - assert arw.replace(minute=50, fold=1) == arrow.Arrow(2013, 5, 5, 12, 50, 45) - - def test_replace_week(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(week=1) - - def test_replace_quarter(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(quarter=1) - - def test_replace_quarter_and_fold(self): - with pytest.raises(AttributeError): - arrow.utcnow().replace(fold=1, quarter=1) - - with pytest.raises(AttributeError): - arrow.utcnow().replace(quarter=1, fold=1) - - def test_replace_other_kwargs(self): - - with pytest.raises(AttributeError): - arrow.utcnow().replace(abc="def") - - -class 
class TestArrowShift:
    """Tests for Arrow.shift (relative-delta arithmetic, incl. DST edge cases)."""

    def test_not_attr(self):
        now = arrow.Arrow.utcnow()

        with pytest.raises(AttributeError):
            now.shift(abc=1)

        with pytest.raises(AttributeError):
            now.shift(week=1)

    def test_shift(self):
        arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)

        assert arw.shift(years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45)
        assert arw.shift(quarters=1) == arrow.Arrow(2013, 8, 5, 12, 30, 45)
        assert arw.shift(quarters=1, months=1) == arrow.Arrow(2013, 9, 5, 12, 30, 45)
        assert arw.shift(months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45)
        assert arw.shift(weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45)
        assert arw.shift(days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45)
        assert arw.shift(minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45)
        assert arw.shift(seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46)
        assert arw.shift(microseconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 45, 1)

        # Remember: Python's weekday 0 is Monday
        assert arw.shift(weekday=0) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(weekday=1) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
        assert arw.shift(weekday=2) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
        assert arw.shift(weekday=3) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
        assert arw.shift(weekday=4) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
        assert arw.shift(weekday=5) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
        assert arw.shift(weekday=6) == arw

        with pytest.raises(IndexError):
            arw.shift(weekday=7)

        # Use dateutil.relativedelta's convenient day instances
        assert arw.shift(weekday=MO) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(weekday=MO(0)) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(weekday=MO(1)) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(weekday=MO(2)) == arrow.Arrow(2013, 5, 13, 12, 30, 45)
        assert arw.shift(weekday=TU) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
        assert arw.shift(weekday=TU(0)) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
        assert arw.shift(weekday=TU(1)) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
        assert arw.shift(weekday=TU(2)) == arrow.Arrow(2013, 5, 14, 12, 30, 45)
        assert arw.shift(weekday=WE) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
        assert arw.shift(weekday=WE(0)) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
        assert arw.shift(weekday=WE(1)) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
        assert arw.shift(weekday=WE(2)) == arrow.Arrow(2013, 5, 15, 12, 30, 45)
        assert arw.shift(weekday=TH) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
        assert arw.shift(weekday=TH(0)) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
        assert arw.shift(weekday=TH(1)) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
        assert arw.shift(weekday=TH(2)) == arrow.Arrow(2013, 5, 16, 12, 30, 45)
        assert arw.shift(weekday=FR) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
        assert arw.shift(weekday=FR(0)) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
        assert arw.shift(weekday=FR(1)) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
        assert arw.shift(weekday=FR(2)) == arrow.Arrow(2013, 5, 17, 12, 30, 45)
        assert arw.shift(weekday=SA) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
        assert arw.shift(weekday=SA(0)) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
        assert arw.shift(weekday=SA(1)) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
        assert arw.shift(weekday=SA(2)) == arrow.Arrow(2013, 5, 18, 12, 30, 45)
        assert arw.shift(weekday=SU) == arw
        assert arw.shift(weekday=SU(0)) == arw
        assert arw.shift(weekday=SU(1)) == arw
        assert arw.shift(weekday=SU(2)) == arrow.Arrow(2013, 5, 12, 12, 30, 45)

    def test_shift_negative(self):
        arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)

        assert arw.shift(years=-1) == arrow.Arrow(2012, 5, 5, 12, 30, 45)
        assert arw.shift(quarters=-1) == arrow.Arrow(2013, 2, 5, 12, 30, 45)
        assert arw.shift(quarters=-1, months=-1) == arrow.Arrow(2013, 1, 5, 12, 30, 45)
        assert arw.shift(months=-1) == arrow.Arrow(2013, 4, 5, 12, 30, 45)
        assert arw.shift(weeks=-1) == arrow.Arrow(2013, 4, 28, 12, 30, 45)
        assert arw.shift(days=-1) == arrow.Arrow(2013, 5, 4, 12, 30, 45)
        assert arw.shift(hours=-1) == arrow.Arrow(2013, 5, 5, 11, 30, 45)
        assert arw.shift(minutes=-1) == arrow.Arrow(2013, 5, 5, 12, 29, 45)
        assert arw.shift(seconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44)
        assert arw.shift(microseconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44, 999999)

        # Not sure how practical these negative weekdays are
        assert arw.shift(weekday=-1) == arw.shift(weekday=SU)
        assert arw.shift(weekday=-2) == arw.shift(weekday=SA)
        assert arw.shift(weekday=-3) == arw.shift(weekday=FR)
        assert arw.shift(weekday=-4) == arw.shift(weekday=TH)
        assert arw.shift(weekday=-5) == arw.shift(weekday=WE)
        assert arw.shift(weekday=-6) == arw.shift(weekday=TU)
        assert arw.shift(weekday=-7) == arw.shift(weekday=MO)

        with pytest.raises(IndexError):
            arw.shift(weekday=-8)

        assert arw.shift(weekday=MO(-1)) == arrow.Arrow(2013, 4, 29, 12, 30, 45)
        assert arw.shift(weekday=TU(-1)) == arrow.Arrow(2013, 4, 30, 12, 30, 45)
        assert arw.shift(weekday=WE(-1)) == arrow.Arrow(2013, 5, 1, 12, 30, 45)
        assert arw.shift(weekday=TH(-1)) == arrow.Arrow(2013, 5, 2, 12, 30, 45)
        assert arw.shift(weekday=FR(-1)) == arrow.Arrow(2013, 5, 3, 12, 30, 45)
        assert arw.shift(weekday=SA(-1)) == arrow.Arrow(2013, 5, 4, 12, 30, 45)
        assert arw.shift(weekday=SU(-1)) == arw
        assert arw.shift(weekday=SU(-2)) == arrow.Arrow(2013, 4, 28, 12, 30, 45)

    def test_shift_quarters_bug(self):
        arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)

        # The value of the last-read argument was used instead of the ``quarters`` argument.
        # Recall that the keyword argument dict, like all dicts, is unordered, so only certain
        # combinations of arguments would exhibit this.
        assert arw.shift(quarters=0, years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45)
        assert arw.shift(quarters=0, months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45)
        assert arw.shift(quarters=0, weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45)
        assert arw.shift(quarters=0, days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
        assert arw.shift(quarters=0, hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45)
        assert arw.shift(quarters=0, minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45)
        assert arw.shift(quarters=0, seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46)
        assert arw.shift(quarters=0, microseconds=1) == arrow.Arrow(
            2013, 5, 5, 12, 30, 45, 1
        )

    def test_shift_positive_imaginary(self):
        # Avoid shifting into imaginary datetimes, take into account DST and other timezone changes.
        new_york = arrow.Arrow(2017, 3, 12, 1, 30, tzinfo="America/New_York")
        assert new_york.shift(hours=+1) == arrow.Arrow(
            2017, 3, 12, 3, 30, tzinfo="America/New_York"
        )

        # pendulum example
        paris = arrow.Arrow(2013, 3, 31, 1, 50, tzinfo="Europe/Paris")
        assert paris.shift(minutes=+20) == arrow.Arrow(
            2013, 3, 31, 3, 10, tzinfo="Europe/Paris"
        )

        canberra = arrow.Arrow(2018, 10, 7, 1, 30, tzinfo="Australia/Canberra")
        assert canberra.shift(hours=+1) == arrow.Arrow(
            2018, 10, 7, 3, 30, tzinfo="Australia/Canberra"
        )

        kiev = arrow.Arrow(2018, 3, 25, 2, 30, tzinfo="Europe/Kiev")
        assert kiev.shift(hours=+1) == arrow.Arrow(
            2018, 3, 25, 4, 30, tzinfo="Europe/Kiev"
        )

        # Edge case, the entire day of 2011-12-30 is imaginary in this zone!
        apia = arrow.Arrow(2011, 12, 29, 23, tzinfo="Pacific/Apia")
        assert apia.shift(hours=+2) == arrow.Arrow(
            2011, 12, 31, 1, tzinfo="Pacific/Apia"
        )

    def test_shift_negative_imaginary(self):
        new_york = arrow.Arrow(2011, 3, 13, 3, 30, tzinfo="America/New_York")
        assert new_york.shift(hours=-1) == arrow.Arrow(
            2011, 3, 13, 3, 30, tzinfo="America/New_York"
        )
        assert new_york.shift(hours=-2) == arrow.Arrow(
            2011, 3, 13, 1, 30, tzinfo="America/New_York"
        )

        london = arrow.Arrow(2019, 3, 31, 2, tzinfo="Europe/London")
        assert london.shift(hours=-1) == arrow.Arrow(
            2019, 3, 31, 2, tzinfo="Europe/London"
        )
        assert london.shift(hours=-2) == arrow.Arrow(
            2019, 3, 31, 0, tzinfo="Europe/London"
        )

        # edge case, crossing the international dateline
        apia = arrow.Arrow(2011, 12, 31, 1, tzinfo="Pacific/Apia")
        assert apia.shift(hours=-2) == arrow.Arrow(
            2011, 12, 31, 23, tzinfo="Pacific/Apia"
        )

    @pytest.mark.skipif(
        dateutil.__version__ < "2.7.1", reason="old tz database (2018d needed)"
    )
    def test_shift_kiritimati(self):
        # corrected 2018d tz database release, will fail in earlier versions
        kiritimati = arrow.Arrow(1994, 12, 30, 12, 30, tzinfo="Pacific/Kiritimati")
        assert kiritimati.shift(days=+1) == arrow.Arrow(
            1995, 1, 1, 12, 30, tzinfo="Pacific/Kiritimati"
        )

    @pytest.mark.skipif(
        sys.version_info < (3, 6), reason="unsupported before python 3.6"
    )
    def test_shift_imaginary_seconds(self):
        # FIX: was named ``shift_imaginary_seconds`` (no ``test_`` prefix),
        # so pytest never collected or ran it.
        # offset has a seconds component
        monrovia = arrow.Arrow(1972, 1, 6, 23, tzinfo="Africa/Monrovia")
        assert monrovia.shift(hours=+1, minutes=+30) == arrow.Arrow(
            1972, 1, 7, 1, 14, 30, tzinfo="Africa/Monrovia"
        )


class TestArrowRange:
    """Tests for Arrow.range over each supported frame."""

    def test_year(self):
        result = list(
            arrow.Arrow.range(
                "year", datetime(2013, 1, 2, 3, 4, 5), datetime(2016, 4, 5, 6, 7, 8)
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2014, 1, 2, 3, 4, 5),
            arrow.Arrow(2015, 1, 2, 3, 4, 5),
            arrow.Arrow(2016, 1, 2, 3, 4, 5),
        ]

    def test_quarter(self):
        result = list(
            arrow.Arrow.range(
                "quarter", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9)
            )
        )

        assert result == [
            arrow.Arrow(2013, 2, 3, 4, 5, 6),
            arrow.Arrow(2013, 5, 3, 4, 5, 6),
        ]

    def test_month(self):
        result = list(
            arrow.Arrow.range(
                "month", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9)
            )
        )

        assert result == [
            arrow.Arrow(2013, 2, 3, 4, 5, 6),
            arrow.Arrow(2013, 3, 3, 4, 5, 6),
            arrow.Arrow(2013, 4, 3, 4, 5, 6),
            arrow.Arrow(2013, 5, 3, 4, 5, 6),
        ]

    def test_week(self):
        result = list(
            arrow.Arrow.range(
                "week", datetime(2013, 9, 1, 2, 3, 4), datetime(2013, 10, 1, 2, 3, 4)
            )
        )

        assert result == [
            arrow.Arrow(2013, 9, 1, 2, 3, 4),
            arrow.Arrow(2013, 9, 8, 2, 3, 4),
            arrow.Arrow(2013, 9, 15, 2, 3, 4),
            arrow.Arrow(2013, 9, 22, 2, 3, 4),
            arrow.Arrow(2013, 9, 29, 2, 3, 4),
        ]

    def test_day(self):
        result = list(
            arrow.Arrow.range(
                "day", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 5, 6, 7, 8)
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2013, 1, 3, 3, 4, 5),
            arrow.Arrow(2013, 1, 4, 3, 4, 5),
            arrow.Arrow(2013, 1, 5, 3, 4, 5),
        ]

    def test_hour(self):
        result = list(
            arrow.Arrow.range(
                "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 6, 7, 8)
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2013, 1, 2, 4, 4, 5),
            arrow.Arrow(2013, 1, 2, 5, 4, 5),
            arrow.Arrow(2013, 1, 2, 6, 4, 5),
        ]

        # a range whose start equals its end yields exactly the start
        result = list(
            arrow.Arrow.range(
                "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 5)
            )
        )

        assert result == [arrow.Arrow(2013, 1, 2, 3, 4, 5)]

    def test_minute(self):
        result = list(
            arrow.Arrow.range(
                "minute", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 7, 8)
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2013, 1, 2, 3, 5, 5),
            arrow.Arrow(2013, 1, 2, 3, 6, 5),
            arrow.Arrow(2013, 1, 2, 3, 7, 5),
        ]

    def test_second(self):
        result = list(
            arrow.Arrow.range(
                "second", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 8)
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2013, 1, 2, 3, 4, 6),
            arrow.Arrow(2013, 1, 2, 3, 4, 7),
            arrow.Arrow(2013, 1, 2, 3, 4, 8),
        ]

    def test_arrow(self):
        # range also accepts Arrow instances, not just datetimes
        result = list(
            arrow.Arrow.range(
                "day",
                arrow.Arrow(2013, 1, 2, 3, 4, 5),
                arrow.Arrow(2013, 1, 5, 6, 7, 8),
            )
        )

        assert result == [
            arrow.Arrow(2013, 1, 2, 3, 4, 5),
            arrow.Arrow(2013, 1, 3, 3, 4, 5),
            arrow.Arrow(2013, 1, 4, 3, 4, 5),
            arrow.Arrow(2013, 1, 5, 3, 4, 5),
        ]

    def test_naive_tz(self):
        result = arrow.Arrow.range(
            "year", datetime(2013, 1, 2, 3), datetime(2016, 4, 5, 6), "US/Pacific"
        )

        for r in result:
            assert r.tzinfo == tz.gettz("US/Pacific")

    def test_aware_same_tz(self):
        result = arrow.Arrow.range(
            "day",
            arrow.Arrow(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")),
            arrow.Arrow(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
        )

        for r in result:
            assert r.tzinfo == tz.gettz("US/Pacific")

    def test_aware_different_tz(self):
        # the start's timezone wins when no explicit tz is given
        result = arrow.Arrow.range(
            "day",
            datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")),
            datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
        )

        for r in result:
            assert r.tzinfo == tz.gettz("US/Eastern")

    def test_aware_tz(self):
        # an explicit tz overrides both endpoints' timezones
        result = arrow.Arrow.range(
            "day",
            datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")),
            datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
            tz=tz.gettz("US/Central"),
        )

        for r in result:
            assert r.tzinfo == tz.gettz("US/Central")

    def test_imaginary(self):
        # issue #72, avoid duplication in utc column
        before = arrow.Arrow(2018, 3, 10, 23, tzinfo="US/Pacific")
        after = arrow.Arrow(2018, 3, 11, 4, tzinfo="US/Pacific")

        pacific_range = [t for t in arrow.Arrow.range("hour", before, after)]
        utc_range = [t.to("utc") for t in arrow.Arrow.range("hour", before, after)]

        assert len(pacific_range) == len(set(pacific_range))
        assert len(utc_range) == len(set(utc_range))

    def test_unsupported(self):
        with pytest.raises(AttributeError):
            next(arrow.Arrow.range("abc", datetime.utcnow(), datetime.utcnow()))

    def test_range_over_months_ending_on_different_days(self):
        # regression test for issue #842
        result = list(arrow.Arrow.range("month", datetime(2015, 1, 31), limit=4))
        assert result == [
            arrow.Arrow(2015, 1, 31),
            arrow.Arrow(2015, 2, 28),
            arrow.Arrow(2015, 3, 31),
            arrow.Arrow(2015, 4, 30),
        ]

        result = list(arrow.Arrow.range("month", datetime(2015, 1, 30), limit=3))
        assert result == [
            arrow.Arrow(2015, 1, 30),
            arrow.Arrow(2015, 2, 28),
            arrow.Arrow(2015, 3, 30),
        ]

        result = list(arrow.Arrow.range("month", datetime(2015, 2, 28), limit=3))
        assert result == [
            arrow.Arrow(2015, 2, 28),
            arrow.Arrow(2015, 3, 28),
            arrow.Arrow(2015, 4, 28),
        ]

        result = list(arrow.Arrow.range("month", datetime(2015, 3, 31), limit=3))
        assert result == [
            arrow.Arrow(2015, 3, 31),
            arrow.Arrow(2015, 4, 30),
            arrow.Arrow(2015, 5, 31),
        ]

    def test_range_over_quarter_months_ending_on_different_days(self):
        result = list(arrow.Arrow.range("quarter", datetime(2014, 11, 30), limit=3))
        assert result == [
            arrow.Arrow(2014, 11, 30),
            arrow.Arrow(2015, 2, 28),
            arrow.Arrow(2015, 5, 30),
        ]

    def test_range_over_year_maintains_end_date_across_leap_year(self):
        result = list(arrow.Arrow.range("year", datetime(2012, 2, 29), limit=5))
        assert result == [
            arrow.Arrow(2012, 2, 29),
            arrow.Arrow(2013, 2, 28),
            arrow.Arrow(2014, 2, 28),
            arrow.Arrow(2015, 2, 28),
            arrow.Arrow(2016, 2, 29),
        ]


class TestArrowSpanRange:
    """Tests for Arrow.span_range (floor/ceil pairs per frame)."""

    def test_year(self):
        result = list(
            arrow.Arrow.span_range("year", datetime(2013, 2, 1), datetime(2016, 3, 31))
        )

        assert result == [
            (
                arrow.Arrow(2013, 1, 1),
                arrow.Arrow(2013, 12, 31, 23, 59, 59, 999999),
            ),
            (
                arrow.Arrow(2014, 1, 1),
                arrow.Arrow(2014, 12, 31, 23, 59, 59, 999999),
            ),
            (
                arrow.Arrow(2015, 1, 1),
                arrow.Arrow(2015, 12, 31, 23, 59, 59, 999999),
            ),
            (
                arrow.Arrow(2016, 1, 1),
                arrow.Arrow(2016, 12, 31, 23, 59, 59, 999999),
            ),
        ]

    def test_quarter(self):
        result = list(
            arrow.Arrow.span_range(
                "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 6, 30, 23, 59, 59, 999999)),
        ]

    def test_month(self):
        result = list(
            arrow.Arrow.span_range("month", datetime(2013, 1, 2), datetime(2013, 4, 15))
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 1, 31, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 2, 1), arrow.Arrow(2013, 2, 28, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 3, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 4, 30, 23, 59, 59, 999999)),
        ]

    def test_week(self):
        result = list(
            arrow.Arrow.span_range("week", datetime(2013, 2, 2), datetime(2013, 2, 28))
        )

        assert result == [
            (arrow.Arrow(2013, 1, 28), arrow.Arrow(2013, 2, 3, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 2, 4), arrow.Arrow(2013, 2, 10, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 2, 11), arrow.Arrow(2013, 2, 17, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 2, 18), arrow.Arrow(2013, 2, 24, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 2, 25), arrow.Arrow(2013, 3, 3, 23, 59, 59, 999999)),
        ]

    def test_day(self):
        result = list(
            arrow.Arrow.span_range(
                "day", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1, 0), arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 2, 0), arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 3, 0), arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 4, 0), arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999)),
        ]

    def test_days(self):
        # plural frame name is accepted as an alias
        result = list(
            arrow.Arrow.span_range(
                "days", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1, 0), arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 2, 0), arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 3, 0), arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 4, 0), arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999)),
        ]

    def test_hour(self):
        result = list(
            arrow.Arrow.span_range(
                "hour", datetime(2013, 1, 1, 0, 30), datetime(2013, 1, 1, 3, 30)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1, 0), arrow.Arrow(2013, 1, 1, 0, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 1), arrow.Arrow(2013, 1, 1, 1, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 2), arrow.Arrow(2013, 1, 1, 2, 59, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999)),
        ]

        result = list(
            arrow.Arrow.span_range(
                "hour", datetime(2013, 1, 1, 3, 30), datetime(2013, 1, 1, 3, 30)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999))
        ]

    def test_minute(self):
        result = list(
            arrow.Arrow.span_range(
                "minute", datetime(2013, 1, 1, 0, 0, 30), datetime(2013, 1, 1, 0, 3, 30)
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1, 0, 0), arrow.Arrow(2013, 1, 1, 0, 0, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 0, 1), arrow.Arrow(2013, 1, 1, 0, 1, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 0, 2), arrow.Arrow(2013, 1, 1, 0, 2, 59, 999999)),
            (arrow.Arrow(2013, 1, 1, 0, 3), arrow.Arrow(2013, 1, 1, 0, 3, 59, 999999)),
        ]

    def test_second(self):
        result = list(
            arrow.Arrow.span_range(
                "second", datetime(2013, 1, 1), datetime(2013, 1, 1, 0, 0, 3)
            )
        )

        assert result == [
            (
                arrow.Arrow(2013, 1, 1, 0, 0, 0),
                arrow.Arrow(2013, 1, 1, 0, 0, 0, 999999),
            ),
            (
                arrow.Arrow(2013, 1, 1, 0, 0, 1),
                arrow.Arrow(2013, 1, 1, 0, 0, 1, 999999),
            ),
            (
                arrow.Arrow(2013, 1, 1, 0, 0, 2),
                arrow.Arrow(2013, 1, 1, 0, 0, 2, 999999),
            ),
            (
                arrow.Arrow(2013, 1, 1, 0, 0, 3),
                arrow.Arrow(2013, 1, 1, 0, 0, 3, 999999),
            ),
        ]

    def test_naive_tz(self):
        tzinfo = tz.gettz("US/Pacific")

        result = arrow.Arrow.span_range(
            "hour", datetime(2013, 1, 1, 0), datetime(2013, 1, 1, 3, 59), "US/Pacific"
        )

        for f, c in result:
            assert f.tzinfo == tzinfo
            assert c.tzinfo == tzinfo

    def test_aware_same_tz(self):
        tzinfo = tz.gettz("US/Pacific")

        result = arrow.Arrow.span_range(
            "hour",
            datetime(2013, 1, 1, 0, tzinfo=tzinfo),
            datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo),
        )

        for f, c in result:
            assert f.tzinfo == tzinfo
            assert c.tzinfo == tzinfo

    def test_aware_different_tz(self):
        tzinfo1 = tz.gettz("US/Pacific")
        tzinfo2 = tz.gettz("US/Eastern")

        result = arrow.Arrow.span_range(
            "hour",
            datetime(2013, 1, 1, 0, tzinfo=tzinfo1),
            datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo2),
        )

        for f, c in result:
            assert f.tzinfo == tzinfo1
            assert c.tzinfo == tzinfo1

    def test_aware_tz(self):
        result = arrow.Arrow.span_range(
            "hour",
            datetime(2013, 1, 1, 0, tzinfo=tz.gettz("US/Eastern")),
            datetime(2013, 1, 1, 2, 59, tzinfo=tz.gettz("US/Eastern")),
            tz="US/Central",
        )

        for f, c in result:
            assert f.tzinfo == tz.gettz("US/Central")
            assert c.tzinfo == tz.gettz("US/Central")

    def test_bounds_param_is_passed(self):
        result = list(
            arrow.Arrow.span_range(
                "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15), bounds="[]"
            )
        )

        assert result == [
            (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 4, 1)),
            (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 7, 1)),
        ]


class TestArrowInterval:
    """Tests for Arrow.interval (span_range grouped by a step count)."""

    def test_incorrect_input(self):
        with pytest.raises(ValueError):
            list(
                arrow.Arrow.interval(
                    "month", datetime(2013, 1, 2), datetime(2013, 4, 15), 0
                )
            )

    def test_correct(self):
        result = list(
            arrow.Arrow.interval(
                "hour", datetime(2013, 5, 5, 12, 30), datetime(2013, 5, 5, 17, 15), 2
            )
        )

        assert result == [
            (arrow.Arrow(2013, 5, 5, 12), arrow.Arrow(2013, 5, 5, 13, 59, 59, 999999)),
            (arrow.Arrow(2013, 5, 5, 14), arrow.Arrow(2013, 5, 5, 15, 59, 59, 999999)),
            (arrow.Arrow(2013, 5, 5, 16), arrow.Arrow(2013, 5, 5, 17, 59, 59, 999999)),
        ]

    def test_bounds_param_is_passed(self):
        result = list(
            arrow.Arrow.interval(
                "hour",
                datetime(2013, 5, 5, 12, 30),
                datetime(2013, 5, 5, 17, 15),
                2,
                bounds="[]",
            )
        )

        assert result == [
            (arrow.Arrow(2013, 5, 5, 12), arrow.Arrow(2013, 5, 5, 14)),
            (arrow.Arrow(2013, 5, 5, 14), arrow.Arrow(2013, 5, 5, 16)),
            (arrow.Arrow(2013, 5, 5, 16), arrow.Arrow(2013, 5, 5, 18)),
        ]


@pytest.mark.usefixtures("time_2013_02_15")
class TestArrowSpan:
    """Tests for Arrow.span / floor / ceil; ``self.arrow`` comes from the fixture."""

    def test_span_attribute(self):
        with pytest.raises(AttributeError):
            self.arrow.span("span")

    def test_span_year(self):
        floor, ceil = self.arrow.span("year")

        assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_quarter(self):
        floor, ceil = self.arrow.span("quarter")

        assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 3, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_quarter_count(self):
        floor, ceil = self.arrow.span("quarter", 2)

        assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 6, 30, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_year_count(self):
        floor, ceil = self.arrow.span("year", 2)

        assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2014, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_month(self):
        floor, ceil = self.arrow.span("month")

        assert floor == datetime(2013, 2, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 28, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_week(self):
        floor, ceil = self.arrow.span("week")

        assert floor == datetime(2013, 2, 11, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 17, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_day(self):
        floor, ceil = self.arrow.span("day")

        assert floor == datetime(2013, 2, 15, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_hour(self):
        floor, ceil = self.arrow.span("hour")

        assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_span_minute(self):
        floor, ceil = self.arrow.span("minute")

        assert floor == datetime(2013, 2, 15, 3, 41, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 3, 41, 59, 999999, tzinfo=tz.tzutc())

    def test_span_second(self):
        floor, ceil = self.arrow.span("second")

        assert floor == datetime(2013, 2, 15, 3, 41, 22, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 3, 41, 22, 999999, tzinfo=tz.tzutc())

    def test_span_microsecond(self):
        # a microsecond span collapses to a single instant
        floor, ceil = self.arrow.span("microsecond")

        assert floor == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())

    def test_floor(self):
        # floor()/ceil() are shorthands for the two halves of span()
        floor, ceil = self.arrow.span("month")

        assert floor == self.arrow.floor("month")
        assert ceil == self.arrow.ceil("month")

    def test_span_inclusive_inclusive(self):
        floor, ceil = self.arrow.span("hour", bounds="[]")

        assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc())

    def test_span_exclusive_inclusive(self):
        floor, ceil = self.arrow.span("hour", bounds="(]")

        assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc())

    def test_span_exclusive_exclusive(self):
        floor, ceil = self.arrow.span("hour", bounds="()")

        assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc())
        assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc())

    def test_bounds_are_validated(self):
        with pytest.raises(ValueError):
            floor, ceil = self.arrow.span("hour", bounds="][")


@pytest.mark.usefixtures("time_2013_01_01")
class TestArrowHumanize:
    """Tests for Arrow.humanize; ``self.now``/``self.datetime`` come from the fixture."""

    def test_granularity(self):
        assert self.now.humanize(granularity="second") == "just now"

        later1 = self.now.shift(seconds=1)
        assert self.now.humanize(later1, granularity="second") == "just now"
        assert later1.humanize(self.now, granularity="second") == "just now"
        assert self.now.humanize(later1, granularity="minute") == "0 minutes ago"
        assert later1.humanize(self.now, granularity="minute") == "in 0 minutes"

        later100 = self.now.shift(seconds=100)
        assert self.now.humanize(later100, granularity="second") == "100 seconds ago"
        assert later100.humanize(self.now, granularity="second") == "in 100 seconds"
        assert self.now.humanize(later100, granularity="minute") == "a minute ago"
        assert later100.humanize(self.now, granularity="minute") == "in a minute"
        assert self.now.humanize(later100, granularity="hour") == "0 hours ago"
        assert later100.humanize(self.now, granularity="hour") == "in 0 hours"

        later4000 = self.now.shift(seconds=4000)
        assert self.now.humanize(later4000, granularity="minute") == "66 minutes ago"
        assert later4000.humanize(self.now, granularity="minute") == "in 66 minutes"
        assert self.now.humanize(later4000, granularity="hour") == "an hour ago"
        assert later4000.humanize(self.now, granularity="hour") == "in an hour"
        assert self.now.humanize(later4000, granularity="day") == "0 days ago"
        assert later4000.humanize(self.now, granularity="day") == "in 0 days"

        later105 = self.now.shift(seconds=10 ** 5)
        assert self.now.humanize(later105, granularity="hour") == "27 hours ago"
        assert later105.humanize(self.now, granularity="hour") == "in 27 hours"
        assert self.now.humanize(later105, granularity="day") == "a day ago"
        assert later105.humanize(self.now, granularity="day") == "in a day"
        assert self.now.humanize(later105, granularity="week") == "0 weeks ago"
        assert later105.humanize(self.now, granularity="week") == "in 0 weeks"
        assert self.now.humanize(later105, granularity="month") == "0 months ago"
        assert later105.humanize(self.now, granularity="month") == "in 0 months"
        assert self.now.humanize(later105, granularity=["month"]) == "0 months ago"
        assert later105.humanize(self.now, granularity=["month"]) == "in 0 months"

        later106 = self.now.shift(seconds=3 * 10 ** 6)
        assert self.now.humanize(later106, granularity="day") == "34 days ago"
        assert later106.humanize(self.now, granularity="day") == "in 34 days"
        assert self.now.humanize(later106, granularity="week") == "4 weeks ago"
        assert later106.humanize(self.now, granularity="week") == "in 4 weeks"
        assert self.now.humanize(later106, granularity="month") == "a month ago"
        assert later106.humanize(self.now, granularity="month") == "in a month"
        assert self.now.humanize(later106, granularity="year") == "0 years ago"
        assert later106.humanize(self.now, granularity="year") == "in 0 years"

        later506 = self.now.shift(seconds=50 * 10 ** 6)
        assert self.now.humanize(later506, granularity="week") == "82 weeks ago"
        assert later506.humanize(self.now, granularity="week") == "in 82 weeks"
        assert self.now.humanize(later506, granularity="month") == "18 months ago"
        assert later506.humanize(self.now, granularity="month") == "in 18 months"
        assert self.now.humanize(later506, granularity="year") == "a year ago"
        assert later506.humanize(self.now, granularity="year") == "in a year"

        later108 = self.now.shift(seconds=10 ** 8)
        assert self.now.humanize(later108, granularity="year") == "3 years ago"
        assert later108.humanize(self.now, granularity="year") == "in 3 years"

        later108onlydistance = self.now.shift(seconds=10 ** 8)
        assert (
            self.now.humanize(
                later108onlydistance, only_distance=True, granularity="year"
            )
            == "3 years"
        )
        assert (
            later108onlydistance.humanize(
                self.now, only_distance=True, granularity="year"
            )
            == "3 years"
        )

        with pytest.raises(AttributeError):
            self.now.humanize(later108, granularity="years")

    def test_multiple_granularity(self):
        assert self.now.humanize(granularity="second") == "just now"
        assert self.now.humanize(granularity=["second"]) == "just now"
        assert (
            self.now.humanize(granularity=["year", "month", "day", "hour", "second"])
            == "in 0 years 0 months 0 days 0 hours and 0 seconds"
        )

        later4000 = self.now.shift(seconds=4000)
        assert (
            later4000.humanize(self.now, granularity=["hour", "minute"])
            == "in an hour and 6 minutes"
        )
        assert (
            self.now.humanize(later4000, granularity=["hour", "minute"])
            == "an hour and 6 minutes ago"
        )
        assert (
            later4000.humanize(
                self.now, granularity=["hour", "minute"], only_distance=True
            )
            == "an hour and 6 minutes"
        )
        assert (
            later4000.humanize(self.now, granularity=["day", "hour", "minute"])
            == "in 0 days an hour and 6 minutes"
        )
        assert (
            self.now.humanize(later4000, granularity=["day", "hour", "minute"])
            == "0 days an hour and 6 minutes ago"
        )

        later105 = self.now.shift(seconds=10 ** 5)
        assert (
            self.now.humanize(later105, granularity=["hour", "day", "minute"])
            == "a day 3 hours and 46 minutes ago"
        )
        with pytest.raises(AttributeError):
            self.now.humanize(later105, granularity=["error", "second"])

        later108onlydistance = self.now.shift(seconds=10 ** 8)
        assert (
            self.now.humanize(
                later108onlydistance, only_distance=True, granularity=["year"]
            )
            == "3 years"
        )
        assert (
            self.now.humanize(
                later108onlydistance, only_distance=True, granularity=["month", "week"]
            )
            == "37 months and 4 weeks"
        )
        assert (
            self.now.humanize(
                later108onlydistance, only_distance=True, granularity=["year", "second"]
            )
            == "3 years and 5327200 seconds"
        )

        one_min_one_sec_ago = self.now.shift(minutes=-1, seconds=-1)
        assert (
            one_min_one_sec_ago.humanize(self.now, granularity=["minute", "second"])
            == "a minute and a second ago"
        )

        one_min_two_secs_ago = self.now.shift(minutes=-1, seconds=-2)
        assert (
            one_min_two_secs_ago.humanize(self.now, granularity=["minute", "second"])
            == "a minute and 2 seconds ago"
        )

    def test_seconds(self):
        later = self.now.shift(seconds=10)

        # regression test for issue #727
        assert self.now.humanize(later) == "10 seconds ago"
        assert later.humanize(self.now) == "in 10 seconds"

        assert self.now.humanize(later, only_distance=True) == "10 seconds"
        assert later.humanize(self.now, only_distance=True) == "10 seconds"

    def test_minute(self):
        later = self.now.shift(minutes=1)

        assert self.now.humanize(later) == "a minute ago"
        assert later.humanize(self.now) == "in a minute"

        assert self.now.humanize(later, only_distance=True) == "a minute"
        assert later.humanize(self.now, only_distance=True) == "a minute"

    def test_minutes(self):
        later = self.now.shift(minutes=2)

        assert self.now.humanize(later) == "2 minutes ago"
        assert later.humanize(self.now) == "in 2 minutes"

        assert self.now.humanize(later, only_distance=True) == "2 minutes"
        assert later.humanize(self.now, only_distance=True) == "2 minutes"

    def test_hour(self):
        later = self.now.shift(hours=1)

        assert self.now.humanize(later) == "an hour ago"
        assert later.humanize(self.now) == "in an hour"

        assert self.now.humanize(later, only_distance=True) == "an hour"
        assert later.humanize(self.now, only_distance=True) == "an hour"

    def test_hours(self):
        later = self.now.shift(hours=2)

        assert self.now.humanize(later) == "2 hours ago"
        assert later.humanize(self.now) == "in 2 hours"

        assert self.now.humanize(later, only_distance=True) == "2 hours"
        assert later.humanize(self.now, only_distance=True) == "2 hours"

    def test_day(self):
        later = self.now.shift(days=1)

        assert self.now.humanize(later) == "a day ago"
        assert later.humanize(self.now) == "in a day"

        # regression test for issue #697
        less_than_48_hours = self.now.shift(
            days=1, hours=23, seconds=59, microseconds=999999
        )
        assert self.now.humanize(less_than_48_hours) == "a day ago"
        assert less_than_48_hours.humanize(self.now) == "in a day"

        less_than_48_hours_date = less_than_48_hours._datetime.date()
        with pytest.raises(TypeError):
            # humanize other argument does not take raw datetime.date objects
            self.now.humanize(less_than_48_hours_date)

        # convert from date to arrow object
        less_than_48_hours_date = arrow.Arrow.fromdate(less_than_48_hours_date)
        assert self.now.humanize(less_than_48_hours_date) == "a day ago"
        assert less_than_48_hours_date.humanize(self.now) == "in a day"

        assert self.now.humanize(later, only_distance=True) == "a day"
        assert later.humanize(self.now, only_distance=True) == "a day"

    def test_days(self):
        later = self.now.shift(days=2)

        assert self.now.humanize(later) == "2 days ago"
        assert later.humanize(self.now) == "in 2 days"

        assert self.now.humanize(later, only_distance=True) == "2 days"
        assert later.humanize(self.now, only_distance=True) == "2 days"

        # Regression tests for humanize bug referenced in issue 541
        later = self.now.shift(days=3)
        assert later.humanize(self.now) == "in 3 days"

        later = self.now.shift(days=3, seconds=1)
        assert later.humanize(self.now) == "in 3 days"

        later = self.now.shift(days=4)
        assert later.humanize(self.now) == "in 4 days"

    def test_week(self):
        later = self.now.shift(weeks=1)

        assert self.now.humanize(later) == "a week ago"
        assert later.humanize(self.now) == "in a week"

        assert self.now.humanize(later, only_distance=True) == "a week"
        assert later.humanize(self.now, only_distance=True) == "a week"

    def test_weeks(self):
        later = self.now.shift(weeks=2)

        assert self.now.humanize(later) == "2 weeks ago"
        assert later.humanize(self.now) == "in 2 weeks"

        assert self.now.humanize(later, only_distance=True) == "2 weeks"
        assert later.humanize(self.now, only_distance=True) == "2 weeks"

    def test_month(self):
        later = self.now.shift(months=1)

        assert self.now.humanize(later) == "a month ago"
        assert later.humanize(self.now) == "in a month"

        assert self.now.humanize(later, only_distance=True) == "a month"
        assert later.humanize(self.now, only_distance=True) == "a month"

    def test_months(self):
        later = self.now.shift(months=2)
        earlier = self.now.shift(months=-2)

        assert earlier.humanize(self.now) == "2 months ago"
        assert later.humanize(self.now) == "in 2 months"

        assert self.now.humanize(later, only_distance=True) == "2 months"
        assert later.humanize(self.now, only_distance=True) == "2 months"

    def test_year(self):
        later = self.now.shift(years=1)

        assert self.now.humanize(later) == "a year ago"
        assert later.humanize(self.now) == "in a year"

        assert self.now.humanize(later, only_distance=True) == "a year"
        assert later.humanize(self.now, only_distance=True) == "a year"

    def test_years(self):
        later = self.now.shift(years=2)

        assert self.now.humanize(later) == "2 years ago"
        assert later.humanize(self.now) == "in 2 years"

        assert self.now.humanize(later, only_distance=True) == "2 years"
        assert later.humanize(self.now, only_distance=True) == "2 years"

        arw = arrow.Arrow(2014, 7, 2)

        result = arw.humanize(self.datetime)

        assert result == "in 2 years"

    # NOTE(review): a ``test_arrow`` method followed here in the original, but the
    # chunk is truncated mid-assertion; it is omitted rather than guessed at.
== "just now" - - def test_datetime_tzinfo(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - result = arw.humanize(self.datetime.replace(tzinfo=tz.tzutc())) - - assert result == "just now" - - def test_other(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - with pytest.raises(TypeError): - arw.humanize(object()) - - def test_invalid_locale(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - with pytest.raises(ValueError): - arw.humanize(locale="klingon") - - def test_none(self): - - arw = arrow.Arrow.utcnow() - - result = arw.humanize() - - assert result == "just now" - - result = arw.humanize(None) - - assert result == "just now" - - def test_untranslated_granularity(self, mocker): - - arw = arrow.Arrow.utcnow() - later = arw.shift(weeks=1) - - # simulate an untranslated timeframe key - mocker.patch.dict("arrow.locales.EnglishLocale.timeframes") - del arrow.locales.EnglishLocale.timeframes["week"] - with pytest.raises(ValueError): - arw.humanize(later, granularity="week") - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowHumanizeTestsWithLocale: - def test_now(self): - - arw = arrow.Arrow(2013, 1, 1, 0, 0, 0) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "сейчас" - - def test_seconds(self): - arw = arrow.Arrow(2013, 1, 1, 0, 0, 44) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "через 44 несколько секунд" - - def test_years(self): - - arw = arrow.Arrow(2011, 7, 2) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "2 года назад" - - -class TestArrowIsBetween: - def test_start_before_end(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - result = target.is_between(start, end) - assert not result - - def test_exclusive_exclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 27)) - 
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 10)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 36)) - result = target.is_between(start, end, "()") - assert result - result = target.is_between(start, end) - assert result - - def test_exclusive_exclusive_bounds_same_date(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "()") - assert not result - - def test_inclusive_exclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 4)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) - result = target.is_between(start, end, "[)") - assert not result - - def test_exclusive_inclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "(]") - assert result - - def test_inclusive_inclusive_bounds_same_date(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "[]") - assert result - - def test_type_error_exception(self): - with pytest.raises(TypeError): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = datetime(2013, 5, 5) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - target.is_between(start, end) - - with pytest.raises(TypeError): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = datetime(2013, 5, 8) - target.is_between(start, end) - - with pytest.raises(TypeError): - target.is_between(None, None) - - def test_value_error_exception(self): - target = 
arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - with pytest.raises(ValueError): - target.is_between(start, end, "][") - with pytest.raises(ValueError): - target.is_between(start, end, "") - with pytest.raises(ValueError): - target.is_between(start, end, "]") - with pytest.raises(ValueError): - target.is_between(start, end, "[") - with pytest.raises(ValueError): - target.is_between(start, end, "hello") - - -class TestArrowUtil: - def test_get_datetime(self): - - get_datetime = arrow.Arrow._get_datetime - - arw = arrow.Arrow.utcnow() - dt = datetime.utcnow() - timestamp = time.time() - - assert get_datetime(arw) == arw.datetime - assert get_datetime(dt) == dt - assert ( - get_datetime(timestamp) == arrow.Arrow.utcfromtimestamp(timestamp).datetime - ) - - with pytest.raises(ValueError) as raise_ctx: - get_datetime("abc") - assert "not recognized as a datetime or timestamp" in str(raise_ctx.value) - - def test_get_tzinfo(self): - - get_tzinfo = arrow.Arrow._get_tzinfo - - with pytest.raises(ValueError) as raise_ctx: - get_tzinfo("abc") - assert "not recognized as a timezone" in str(raise_ctx.value) - - def test_get_iteration_params(self): - - assert arrow.Arrow._get_iteration_params("end", None) == ("end", sys.maxsize) - assert arrow.Arrow._get_iteration_params(None, 100) == (arrow.Arrow.max, 100) - assert arrow.Arrow._get_iteration_params(100, 120) == (100, 120) - - with pytest.raises(ValueError): - arrow.Arrow._get_iteration_params(None, None) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py deleted file mode 100644 index 2b8df5168ff..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -import time -from datetime import 
date, datetime - -import pytest -from dateutil import tz - -from arrow.parser import ParserError - -from .utils import assert_datetime_equality - - -@pytest.mark.usefixtures("arrow_factory") -class TestGet: - def test_no_args(self): - - assert_datetime_equality( - self.factory.get(), datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - def test_timestamp_one_arg_no_arg(self): - - no_arg = self.factory.get(1406430900).timestamp - one_arg = self.factory.get("1406430900", "X").timestamp - - assert no_arg == one_arg - - def test_one_arg_none(self): - - assert_datetime_equality( - self.factory.get(None), datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - def test_struct_time(self): - - assert_datetime_equality( - self.factory.get(time.gmtime()), - datetime.utcnow().replace(tzinfo=tz.tzutc()), - ) - - def test_one_arg_timestamp(self): - - int_timestamp = int(time.time()) - timestamp_dt = datetime.utcfromtimestamp(int_timestamp).replace( - tzinfo=tz.tzutc() - ) - - assert self.factory.get(int_timestamp) == timestamp_dt - - with pytest.raises(ParserError): - self.factory.get(str(int_timestamp)) - - float_timestamp = time.time() - timestamp_dt = datetime.utcfromtimestamp(float_timestamp).replace( - tzinfo=tz.tzutc() - ) - - assert self.factory.get(float_timestamp) == timestamp_dt - - with pytest.raises(ParserError): - self.factory.get(str(float_timestamp)) - - # Regression test for issue #216 - # Python 3 raises OverflowError, Python 2 raises ValueError - timestamp = 99999999999999999999999999.99999999999999999999999999 - with pytest.raises((OverflowError, ValueError)): - self.factory.get(timestamp) - - def test_one_arg_expanded_timestamp(self): - - millisecond_timestamp = 1591328104308 - microsecond_timestamp = 1591328104308505 - - # Regression test for issue #796 - assert self.factory.get(millisecond_timestamp) == datetime.utcfromtimestamp( - 1591328104.308 - ).replace(tzinfo=tz.tzutc()) - assert self.factory.get(microsecond_timestamp) == datetime.utcfromtimestamp( - 
1591328104.308505 - ).replace(tzinfo=tz.tzutc()) - - def test_one_arg_timestamp_with_tzinfo(self): - - timestamp = time.time() - timestamp_dt = datetime.fromtimestamp(timestamp, tz=tz.tzutc()).astimezone( - tz.gettz("US/Pacific") - ) - timezone = tz.gettz("US/Pacific") - - assert_datetime_equality( - self.factory.get(timestamp, tzinfo=timezone), timestamp_dt - ) - - def test_one_arg_arrow(self): - - arw = self.factory.utcnow() - result = self.factory.get(arw) - - assert arw == result - - def test_one_arg_datetime(self): - - dt = datetime.utcnow().replace(tzinfo=tz.tzutc()) - - assert self.factory.get(dt) == dt - - def test_one_arg_date(self): - - d = date.today() - dt = datetime(d.year, d.month, d.day, tzinfo=tz.tzutc()) - - assert self.factory.get(d) == dt - - def test_one_arg_tzinfo(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality( - self.factory.get(tz.gettz("US/Pacific")), self.expected - ) - - # regression test for issue #658 - def test_one_arg_dateparser_datetime(self): - dateparser = pytest.importorskip("dateparser") - expected = datetime(1990, 1, 1).replace(tzinfo=tz.tzutc()) - # dateparser outputs: datetime.datetime(1990, 1, 1, 0, 0, tzinfo=) - parsed_date = dateparser.parse("1990-01-01T00:00:00+00:00") - dt_output = self.factory.get(parsed_date)._datetime.replace(tzinfo=tz.tzutc()) - assert dt_output == expected - - def test_kwarg_tzinfo(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality( - self.factory.get(tzinfo=tz.gettz("US/Pacific")), self.expected - ) - - def test_kwarg_tzinfo_string(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality(self.factory.get(tzinfo="US/Pacific"), self.expected) - - with pytest.raises(ParserError): - 
self.factory.get(tzinfo="US/PacificInvalidTzinfo") - - def test_kwarg_normalize_whitespace(self): - result = self.factory.get( - "Jun 1 2005 1:33PM", - "MMM D YYYY H:mmA", - tzinfo=tz.tzutc(), - normalize_whitespace=True, - ) - assert result._datetime == datetime(2005, 6, 1, 13, 33, tzinfo=tz.tzutc()) - - result = self.factory.get( - "\t 2013-05-05T12:30:45.123456 \t \n", - tzinfo=tz.tzutc(), - normalize_whitespace=True, - ) - assert result._datetime == datetime( - 2013, 5, 5, 12, 30, 45, 123456, tzinfo=tz.tzutc() - ) - - def test_one_arg_iso_str(self): - - dt = datetime.utcnow() - - assert_datetime_equality( - self.factory.get(dt.isoformat()), dt.replace(tzinfo=tz.tzutc()) - ) - - def test_one_arg_iso_calendar(self): - - pairs = [ - (datetime(2004, 1, 4), (2004, 1, 7)), - (datetime(2008, 12, 30), (2009, 1, 2)), - (datetime(2010, 1, 2), (2009, 53, 6)), - (datetime(2000, 2, 29), (2000, 9, 2)), - (datetime(2005, 1, 1), (2004, 53, 6)), - (datetime(2010, 1, 4), (2010, 1, 1)), - (datetime(2010, 1, 3), (2009, 53, 7)), - (datetime(2003, 12, 29), (2004, 1, 1)), - ] - - for pair in pairs: - dt, iso = pair - assert self.factory.get(iso) == self.factory.get(dt) - - with pytest.raises(TypeError): - self.factory.get((2014, 7, 1, 4)) - - with pytest.raises(TypeError): - self.factory.get((2014, 7)) - - with pytest.raises(ValueError): - self.factory.get((2014, 70, 1)) - - with pytest.raises(ValueError): - self.factory.get((2014, 7, 10)) - - def test_one_arg_other(self): - - with pytest.raises(TypeError): - self.factory.get(object()) - - def test_one_arg_bool(self): - - with pytest.raises(TypeError): - self.factory.get(False) - - with pytest.raises(TypeError): - self.factory.get(True) - - def test_two_args_datetime_tzinfo(self): - - result = self.factory.get(datetime(2013, 1, 1), tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_datetime_tz_str(self): - - result = self.factory.get(datetime(2013, 1, 1), 
"US/Pacific") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_date_tzinfo(self): - - result = self.factory.get(date(2013, 1, 1), tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_date_tz_str(self): - - result = self.factory.get(date(2013, 1, 1), "US/Pacific") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_datetime_other(self): - - with pytest.raises(TypeError): - self.factory.get(datetime.utcnow(), object()) - - def test_two_args_date_other(self): - - with pytest.raises(TypeError): - self.factory.get(date.today(), object()) - - def test_two_args_str_str(self): - - result = self.factory.get("2013-01-01", "YYYY-MM-DD") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_str_tzinfo(self): - - result = self.factory.get("2013-01-01", tzinfo=tz.gettz("US/Pacific")) - - assert_datetime_equality( - result._datetime, datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - ) - - def test_two_args_twitter_format(self): - - # format returned by twitter API for created_at: - twitter_date = "Fri Apr 08 21:08:54 +0000 2016" - result = self.factory.get(twitter_date, "ddd MMM DD HH:mm:ss Z YYYY") - - assert result._datetime == datetime(2016, 4, 8, 21, 8, 54, tzinfo=tz.tzutc()) - - def test_two_args_str_list(self): - - result = self.factory.get("2013-01-01", ["MM/DD/YYYY", "YYYY-MM-DD"]) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_unicode_unicode(self): - - result = self.factory.get(u"2013-01-01", u"YYYY-MM-DD") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_other(self): - - with pytest.raises(TypeError): - self.factory.get(object(), object()) - - def test_three_args_with_tzinfo(self): - - timefmt = "YYYYMMDD" - d = "20150514" - - assert self.factory.get(d, 
timefmt, tzinfo=tz.tzlocal()) == datetime( - 2015, 5, 14, tzinfo=tz.tzlocal() - ) - - def test_three_args(self): - - assert self.factory.get(2013, 1, 1) == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_full_kwargs(self): - - assert ( - self.factory.get( - year=2016, - month=7, - day=14, - hour=7, - minute=16, - second=45, - microsecond=631092, - ) - == datetime(2016, 7, 14, 7, 16, 45, 631092, tzinfo=tz.tzutc()) - ) - - def test_three_kwargs(self): - - assert self.factory.get(year=2016, month=7, day=14) == datetime( - 2016, 7, 14, 0, 0, tzinfo=tz.tzutc() - ) - - def test_tzinfo_string_kwargs(self): - result = self.factory.get("2019072807", "YYYYMMDDHH", tzinfo="UTC") - assert result._datetime == datetime(2019, 7, 28, 7, 0, 0, 0, tzinfo=tz.tzutc()) - - def test_insufficient_kwargs(self): - - with pytest.raises(TypeError): - self.factory.get(year=2016) - - with pytest.raises(TypeError): - self.factory.get(year=2016, month=7) - - def test_locale(self): - result = self.factory.get("2010", "YYYY", locale="ja") - assert result._datetime == datetime(2010, 1, 1, 0, 0, 0, 0, tzinfo=tz.tzutc()) - - # regression test for issue #701 - result = self.factory.get( - "Montag, 9. September 2019, 16:15-20:00", "dddd, D. 
MMMM YYYY", locale="de" - ) - assert result._datetime == datetime(2019, 9, 9, 0, 0, 0, 0, tzinfo=tz.tzutc()) - - def test_locale_kwarg_only(self): - res = self.factory.get(locale="ja") - assert res.tzinfo == tz.tzutc() - - def test_locale_with_tzinfo(self): - res = self.factory.get(locale="ja", tzinfo=tz.gettz("Asia/Tokyo")) - assert res.tzinfo == tz.gettz("Asia/Tokyo") - - -@pytest.mark.usefixtures("arrow_factory") -class TestUtcNow: - def test_utcnow(self): - - assert_datetime_equality( - self.factory.utcnow()._datetime, - datetime.utcnow().replace(tzinfo=tz.tzutc()), - ) - - -@pytest.mark.usefixtures("arrow_factory") -class TestNow: - def test_no_tz(self): - - assert_datetime_equality(self.factory.now(), datetime.now(tz.tzlocal())) - - def test_tzinfo(self): - - assert_datetime_equality( - self.factory.now(tz.gettz("EST")), datetime.now(tz.gettz("EST")) - ) - - def test_tz_str(self): - - assert_datetime_equality(self.factory.now("EST"), datetime.now(tz.gettz("EST"))) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py deleted file mode 100644 index e97aeb5dcc7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py +++ /dev/null @@ -1,282 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import datetime - -import pytest -import pytz -from dateutil import tz as dateutil_tz - -from arrow import ( - FORMAT_ATOM, - FORMAT_COOKIE, - FORMAT_RFC822, - FORMAT_RFC850, - FORMAT_RFC1036, - FORMAT_RFC1123, - FORMAT_RFC2822, - FORMAT_RFC3339, - FORMAT_RSS, - FORMAT_W3C, -) - -from .utils import make_full_tz_list - - -@pytest.mark.usefixtures("arrow_formatter") -class TestFormatterFormatToken: - def test_format(self): - - dt = datetime(2013, 2, 5, 12, 32, 51) - - result = self.formatter.format(dt, "MM-DD-YYYY hh:mm:ss a") - - assert result == "02-05-2013 12:32:51 pm" - - def test_year(self): - - dt = 
datetime(2013, 1, 1) - assert self.formatter._format_token(dt, "YYYY") == "2013" - assert self.formatter._format_token(dt, "YY") == "13" - - def test_month(self): - - dt = datetime(2013, 1, 1) - assert self.formatter._format_token(dt, "MMMM") == "January" - assert self.formatter._format_token(dt, "MMM") == "Jan" - assert self.formatter._format_token(dt, "MM") == "01" - assert self.formatter._format_token(dt, "M") == "1" - - def test_day(self): - - dt = datetime(2013, 2, 1) - assert self.formatter._format_token(dt, "DDDD") == "032" - assert self.formatter._format_token(dt, "DDD") == "32" - assert self.formatter._format_token(dt, "DD") == "01" - assert self.formatter._format_token(dt, "D") == "1" - assert self.formatter._format_token(dt, "Do") == "1st" - - assert self.formatter._format_token(dt, "dddd") == "Friday" - assert self.formatter._format_token(dt, "ddd") == "Fri" - assert self.formatter._format_token(dt, "d") == "5" - - def test_hour(self): - - dt = datetime(2013, 1, 1, 2) - assert self.formatter._format_token(dt, "HH") == "02" - assert self.formatter._format_token(dt, "H") == "2" - - dt = datetime(2013, 1, 1, 13) - assert self.formatter._format_token(dt, "HH") == "13" - assert self.formatter._format_token(dt, "H") == "13" - - dt = datetime(2013, 1, 1, 2) - assert self.formatter._format_token(dt, "hh") == "02" - assert self.formatter._format_token(dt, "h") == "2" - - dt = datetime(2013, 1, 1, 13) - assert self.formatter._format_token(dt, "hh") == "01" - assert self.formatter._format_token(dt, "h") == "1" - - # test that 12-hour time converts to '12' at midnight - dt = datetime(2013, 1, 1, 0) - assert self.formatter._format_token(dt, "hh") == "12" - assert self.formatter._format_token(dt, "h") == "12" - - def test_minute(self): - - dt = datetime(2013, 1, 1, 0, 1) - assert self.formatter._format_token(dt, "mm") == "01" - assert self.formatter._format_token(dt, "m") == "1" - - def test_second(self): - - dt = datetime(2013, 1, 1, 0, 0, 1) - assert 
self.formatter._format_token(dt, "ss") == "01" - assert self.formatter._format_token(dt, "s") == "1" - - def test_sub_second(self): - - dt = datetime(2013, 1, 1, 0, 0, 0, 123456) - assert self.formatter._format_token(dt, "SSSSSS") == "123456" - assert self.formatter._format_token(dt, "SSSSS") == "12345" - assert self.formatter._format_token(dt, "SSSS") == "1234" - assert self.formatter._format_token(dt, "SSS") == "123" - assert self.formatter._format_token(dt, "SS") == "12" - assert self.formatter._format_token(dt, "S") == "1" - - dt = datetime(2013, 1, 1, 0, 0, 0, 2000) - assert self.formatter._format_token(dt, "SSSSSS") == "002000" - assert self.formatter._format_token(dt, "SSSSS") == "00200" - assert self.formatter._format_token(dt, "SSSS") == "0020" - assert self.formatter._format_token(dt, "SSS") == "002" - assert self.formatter._format_token(dt, "SS") == "00" - assert self.formatter._format_token(dt, "S") == "0" - - def test_timestamp(self): - - timestamp = 1588437009.8952794 - dt = datetime.utcfromtimestamp(timestamp) - expected = str(int(timestamp)) - assert self.formatter._format_token(dt, "X") == expected - - # Must round because time.time() may return a float with greater - # than 6 digits of precision - expected = str(int(timestamp * 1000000)) - assert self.formatter._format_token(dt, "x") == expected - - def test_timezone(self): - - dt = datetime.utcnow().replace(tzinfo=dateutil_tz.gettz("US/Pacific")) - - result = self.formatter._format_token(dt, "ZZ") - assert result == "-07:00" or result == "-08:00" - - result = self.formatter._format_token(dt, "Z") - assert result == "-0700" or result == "-0800" - - @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) - def test_timezone_formatter(self, full_tz_name): - - # This test will fail if we use "now" as date as soon as we change from/to DST - dt = datetime(1986, 2, 14, tzinfo=pytz.timezone("UTC")).replace( - tzinfo=dateutil_tz.gettz(full_tz_name) - ) - abbreviation = dt.tzname() - - result = 
self.formatter._format_token(dt, "ZZZ") - assert result == abbreviation - - def test_am_pm(self): - - dt = datetime(2012, 1, 1, 11) - assert self.formatter._format_token(dt, "a") == "am" - assert self.formatter._format_token(dt, "A") == "AM" - - dt = datetime(2012, 1, 1, 13) - assert self.formatter._format_token(dt, "a") == "pm" - assert self.formatter._format_token(dt, "A") == "PM" - - def test_week(self): - dt = datetime(2017, 5, 19) - assert self.formatter._format_token(dt, "W") == "2017-W20-5" - - # make sure week is zero padded when needed - dt_early = datetime(2011, 1, 20) - assert self.formatter._format_token(dt_early, "W") == "2011-W03-4" - - def test_nonsense(self): - dt = datetime(2012, 1, 1, 11) - assert self.formatter._format_token(dt, None) is None - assert self.formatter._format_token(dt, "NONSENSE") is None - - def test_escape(self): - - assert ( - self.formatter.format( - datetime(2015, 12, 10, 17, 9), "MMMM D, YYYY [at] h:mma" - ) - == "December 10, 2015 at 5:09pm" - ) - - assert ( - self.formatter.format( - datetime(2015, 12, 10, 17, 9), "[MMMM] M D, YYYY [at] h:mma" - ) - == "MMMM 12 10, 2015 at 5:09pm" - ) - - assert ( - self.formatter.format( - datetime(1990, 11, 25), - "[It happened on] MMMM Do [in the year] YYYY [a long time ago]", - ) - == "It happened on November 25th in the year 1990 a long time ago" - ) - - assert ( - self.formatter.format( - datetime(1990, 11, 25), - "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]", - ) - == "It happened on November 25th in the year 1990 a long time ago" - ) - - assert ( - self.formatter.format( - datetime(1, 1, 1), "[I'm][ entirely][ escaped,][ weee!]" - ) - == "I'm entirely escaped, weee!" 
- ) - - # Special RegEx characters - assert ( - self.formatter.format( - datetime(2017, 12, 31, 2, 0), "MMM DD, YYYY |^${}().*+?<>-& h:mm A" - ) - == "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM" - ) - - # Escaping is atomic: brackets inside brackets are treated literally - assert self.formatter.format(datetime(1, 1, 1), "[[[ ]]") == "[[ ]" - - -@pytest.mark.usefixtures("arrow_formatter", "time_1975_12_25") -class TestFormatterBuiltinFormats: - def test_atom(self): - assert ( - self.formatter.format(self.datetime, FORMAT_ATOM) - == "1975-12-25 14:15:16-05:00" - ) - - def test_cookie(self): - assert ( - self.formatter.format(self.datetime, FORMAT_COOKIE) - == "Thursday, 25-Dec-1975 14:15:16 EST" - ) - - def test_rfc_822(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC822) - == "Thu, 25 Dec 75 14:15:16 -0500" - ) - - def test_rfc_850(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC850) - == "Thursday, 25-Dec-75 14:15:16 EST" - ) - - def test_rfc_1036(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC1036) - == "Thu, 25 Dec 75 14:15:16 -0500" - ) - - def test_rfc_1123(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC1123) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_rfc_2822(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC2822) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_rfc3339(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC3339) - == "1975-12-25 14:15:16-05:00" - ) - - def test_rss(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RSS) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_w3c(self): - assert ( - self.formatter.format(self.datetime, FORMAT_W3C) - == "1975-12-25 14:15:16-05:00" - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py deleted file mode 100644 index 
006ccdd5bac..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py +++ /dev/null @@ -1,1352 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import pytest - -from arrow import arrow, locales - - -@pytest.mark.usefixtures("lang_locales") -class TestLocaleValidation: - """Validate locales to ensure that translations are valid and complete""" - - def test_locale_validation(self): - - for _, locale_cls in self.locales.items(): - # 7 days + 1 spacer to allow for 1-indexing of months - assert len(locale_cls.day_names) == 8 - assert locale_cls.day_names[0] == "" - # ensure that all string from index 1 onward are valid (not blank or None) - assert all(locale_cls.day_names[1:]) - - assert len(locale_cls.day_abbreviations) == 8 - assert locale_cls.day_abbreviations[0] == "" - assert all(locale_cls.day_abbreviations[1:]) - - # 12 months + 1 spacer to allow for 1-indexing of months - assert len(locale_cls.month_names) == 13 - assert locale_cls.month_names[0] == "" - assert all(locale_cls.month_names[1:]) - - assert len(locale_cls.month_abbreviations) == 13 - assert locale_cls.month_abbreviations[0] == "" - assert all(locale_cls.month_abbreviations[1:]) - - assert len(locale_cls.names) > 0 - assert locale_cls.past is not None - assert locale_cls.future is not None - - -class TestModule: - def test_get_locale(self, mocker): - mock_locale = mocker.Mock() - mock_locale_cls = mocker.Mock() - mock_locale_cls.return_value = mock_locale - - with pytest.raises(ValueError): - arrow.locales.get_locale("locale_name") - - cls_dict = arrow.locales._locales - mocker.patch.dict(cls_dict, {"locale_name": mock_locale_cls}) - - result = arrow.locales.get_locale("locale_name") - - assert result == mock_locale - - def test_get_locale_by_class_name(self, mocker): - mock_locale_cls = mocker.Mock() - mock_locale_obj = mock_locale_cls.return_value = mocker.Mock() - - globals_fn = mocker.Mock() - globals_fn.return_value = 
{"NonExistentLocale": mock_locale_cls} - - with pytest.raises(ValueError): - arrow.locales.get_locale_by_class_name("NonExistentLocale") - - mocker.patch.object(locales, "globals", globals_fn) - result = arrow.locales.get_locale_by_class_name("NonExistentLocale") - - mock_locale_cls.assert_called_once_with() - assert result == mock_locale_obj - - def test_locales(self): - - assert len(locales._locales) > 0 - - -@pytest.mark.usefixtures("lang_locale") -class TestEnglishLocale: - def test_describe(self): - assert self.locale.describe("now", only_distance=True) == "instantly" - assert self.locale.describe("now", only_distance=False) == "just now" - - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 hours" - assert self.locale._format_timeframe("hour", 0) == "an hour" - - def test_format_relative_now(self): - - result = self.locale._format_relative("just now", "now", 0) - - assert result == "just now" - - def test_format_relative_past(self): - - result = self.locale._format_relative("an hour", "hour", 1) - - assert result == "in an hour" - - def test_format_relative_future(self): - - result = self.locale._format_relative("an hour", "hour", -1) - - assert result == "an hour ago" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(0) == "0th" - assert self.locale.ordinal_number(1) == "1st" - assert self.locale.ordinal_number(2) == "2nd" - assert self.locale.ordinal_number(3) == "3rd" - assert self.locale.ordinal_number(4) == "4th" - assert self.locale.ordinal_number(10) == "10th" - assert self.locale.ordinal_number(11) == "11th" - assert self.locale.ordinal_number(12) == "12th" - assert self.locale.ordinal_number(13) == "13th" - assert self.locale.ordinal_number(14) == "14th" - assert self.locale.ordinal_number(21) == "21st" - assert self.locale.ordinal_number(22) == "22nd" - assert self.locale.ordinal_number(23) == "23rd" - assert self.locale.ordinal_number(24) == "24th" - - assert 
self.locale.ordinal_number(100) == "100th" - assert self.locale.ordinal_number(101) == "101st" - assert self.locale.ordinal_number(102) == "102nd" - assert self.locale.ordinal_number(103) == "103rd" - assert self.locale.ordinal_number(104) == "104th" - assert self.locale.ordinal_number(110) == "110th" - assert self.locale.ordinal_number(111) == "111th" - assert self.locale.ordinal_number(112) == "112th" - assert self.locale.ordinal_number(113) == "113th" - assert self.locale.ordinal_number(114) == "114th" - assert self.locale.ordinal_number(121) == "121st" - assert self.locale.ordinal_number(122) == "122nd" - assert self.locale.ordinal_number(123) == "123rd" - assert self.locale.ordinal_number(124) == "124th" - - def test_meridian_invalid_token(self): - assert self.locale.meridian(7, None) is None - assert self.locale.meridian(7, "B") is None - assert self.locale.meridian(7, "NONSENSE") is None - - -@pytest.mark.usefixtures("lang_locale") -class TestItalianLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1º" - - -@pytest.mark.usefixtures("lang_locale") -class TestSpanishLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1º" - - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "ahora" - assert self.locale._format_timeframe("seconds", 1) == "1 segundos" - assert self.locale._format_timeframe("seconds", 3) == "3 segundos" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "un minuto" - assert self.locale._format_timeframe("minutes", 4) == "4 minutos" - assert self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "una hora" - assert self.locale._format_timeframe("hours", 5) == "5 horas" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "un día" - assert 
self.locale._format_timeframe("days", 6) == "6 días" - assert self.locale._format_timeframe("days", 12) == "12 días" - assert self.locale._format_timeframe("week", 1) == "una semana" - assert self.locale._format_timeframe("weeks", 2) == "2 semanas" - assert self.locale._format_timeframe("weeks", 3) == "3 semanas" - assert self.locale._format_timeframe("month", 1) == "un mes" - assert self.locale._format_timeframe("months", 7) == "7 meses" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "un año" - assert self.locale._format_timeframe("years", 8) == "8 años" - assert self.locale._format_timeframe("years", 12) == "12 años" - - assert self.locale._format_timeframe("now", 0) == "ahora" - assert self.locale._format_timeframe("seconds", -1) == "1 segundos" - assert self.locale._format_timeframe("seconds", -9) == "9 segundos" - assert self.locale._format_timeframe("seconds", -12) == "12 segundos" - assert self.locale._format_timeframe("minute", -1) == "un minuto" - assert self.locale._format_timeframe("minutes", -2) == "2 minutos" - assert self.locale._format_timeframe("minutes", -10) == "10 minutos" - assert self.locale._format_timeframe("hour", -1) == "una hora" - assert self.locale._format_timeframe("hours", -3) == "3 horas" - assert self.locale._format_timeframe("hours", -11) == "11 horas" - assert self.locale._format_timeframe("day", -1) == "un día" - assert self.locale._format_timeframe("days", -2) == "2 días" - assert self.locale._format_timeframe("days", -12) == "12 días" - assert self.locale._format_timeframe("week", -1) == "una semana" - assert self.locale._format_timeframe("weeks", -2) == "2 semanas" - assert self.locale._format_timeframe("weeks", -3) == "3 semanas" - assert self.locale._format_timeframe("month", -1) == "un mes" - assert self.locale._format_timeframe("months", -3) == "3 meses" - assert self.locale._format_timeframe("months", -13) == "13 meses" - assert 
self.locale._format_timeframe("year", -1) == "un año" - assert self.locale._format_timeframe("years", -4) == "4 años" - assert self.locale._format_timeframe("years", -14) == "14 años" - - -@pytest.mark.usefixtures("lang_locale") -class TestFrenchLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1er" - assert self.locale.ordinal_number(2) == "2e" - - def test_month_abbreviation(self): - assert "juil" in self.locale.month_abbreviations - - -@pytest.mark.usefixtures("lang_locale") -class TestFrenchCanadianLocale: - def test_month_abbreviation(self): - assert "juill" in self.locale.month_abbreviations - - -@pytest.mark.usefixtures("lang_locale") -class TestRussianLocale: - def test_plurals2(self): - assert self.locale._format_timeframe("hours", 0) == "0 часов" - assert self.locale._format_timeframe("hours", 1) == "1 час" - assert self.locale._format_timeframe("hours", 2) == "2 часа" - assert self.locale._format_timeframe("hours", 4) == "4 часа" - assert self.locale._format_timeframe("hours", 5) == "5 часов" - assert self.locale._format_timeframe("hours", 21) == "21 час" - assert self.locale._format_timeframe("hours", 22) == "22 часа" - assert self.locale._format_timeframe("hours", 25) == "25 часов" - - # feminine grammatical gender should be tested separately - assert self.locale._format_timeframe("minutes", 0) == "0 минут" - assert self.locale._format_timeframe("minutes", 1) == "1 минуту" - assert self.locale._format_timeframe("minutes", 2) == "2 минуты" - assert self.locale._format_timeframe("minutes", 4) == "4 минуты" - assert self.locale._format_timeframe("minutes", 5) == "5 минут" - assert self.locale._format_timeframe("minutes", 21) == "21 минуту" - assert self.locale._format_timeframe("minutes", 22) == "22 минуты" - assert self.locale._format_timeframe("minutes", 25) == "25 минут" - - -@pytest.mark.usefixtures("lang_locale") -class TestPolishLocale: - def test_plurals(self): - - assert self.locale._format_timeframe("seconds", 0) 
== "0 sekund" - assert self.locale._format_timeframe("second", 1) == "sekundę" - assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" - assert self.locale._format_timeframe("seconds", 5) == "5 sekund" - assert self.locale._format_timeframe("seconds", 21) == "21 sekund" - assert self.locale._format_timeframe("seconds", 22) == "22 sekundy" - assert self.locale._format_timeframe("seconds", 25) == "25 sekund" - - assert self.locale._format_timeframe("minutes", 0) == "0 minut" - assert self.locale._format_timeframe("minute", 1) == "minutę" - assert self.locale._format_timeframe("minutes", 2) == "2 minuty" - assert self.locale._format_timeframe("minutes", 5) == "5 minut" - assert self.locale._format_timeframe("minutes", 21) == "21 minut" - assert self.locale._format_timeframe("minutes", 22) == "22 minuty" - assert self.locale._format_timeframe("minutes", 25) == "25 minut" - - assert self.locale._format_timeframe("hours", 0) == "0 godzin" - assert self.locale._format_timeframe("hour", 1) == "godzinę" - assert self.locale._format_timeframe("hours", 2) == "2 godziny" - assert self.locale._format_timeframe("hours", 5) == "5 godzin" - assert self.locale._format_timeframe("hours", 21) == "21 godzin" - assert self.locale._format_timeframe("hours", 22) == "22 godziny" - assert self.locale._format_timeframe("hours", 25) == "25 godzin" - - assert self.locale._format_timeframe("weeks", 0) == "0 tygodni" - assert self.locale._format_timeframe("week", 1) == "tydzień" - assert self.locale._format_timeframe("weeks", 2) == "2 tygodnie" - assert self.locale._format_timeframe("weeks", 5) == "5 tygodni" - assert self.locale._format_timeframe("weeks", 21) == "21 tygodni" - assert self.locale._format_timeframe("weeks", 22) == "22 tygodnie" - assert self.locale._format_timeframe("weeks", 25) == "25 tygodni" - - assert self.locale._format_timeframe("months", 0) == "0 miesięcy" - assert self.locale._format_timeframe("month", 1) == "miesiąc" - assert 
self.locale._format_timeframe("months", 2) == "2 miesiące" - assert self.locale._format_timeframe("months", 5) == "5 miesięcy" - assert self.locale._format_timeframe("months", 21) == "21 miesięcy" - assert self.locale._format_timeframe("months", 22) == "22 miesiące" - assert self.locale._format_timeframe("months", 25) == "25 miesięcy" - - assert self.locale._format_timeframe("years", 0) == "0 lat" - assert self.locale._format_timeframe("year", 1) == "rok" - assert self.locale._format_timeframe("years", 2) == "2 lata" - assert self.locale._format_timeframe("years", 5) == "5 lat" - assert self.locale._format_timeframe("years", 21) == "21 lat" - assert self.locale._format_timeframe("years", 22) == "22 lata" - assert self.locale._format_timeframe("years", 25) == "25 lat" - - -@pytest.mark.usefixtures("lang_locale") -class TestIcelandicLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("minute", -1) == "einni mínútu" - assert self.locale._format_timeframe("minute", 1) == "eina mínútu" - - assert self.locale._format_timeframe("hours", -2) == "2 tímum" - assert self.locale._format_timeframe("hours", 2) == "2 tíma" - assert self.locale._format_timeframe("now", 0) == "rétt í þessu" - - -@pytest.mark.usefixtures("lang_locale") -class TestMalayalamLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 മണിക്കൂർ" - assert self.locale._format_timeframe("hour", 0) == "ഒരു മണിക്കൂർ" - - def test_format_relative_now(self): - - result = self.locale._format_relative("ഇപ്പോൾ", "now", 0) - - assert result == "ഇപ്പോൾ" - - def test_format_relative_past(self): - - result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", 1) - assert result == "ഒരു മണിക്കൂർ ശേഷം" - - def test_format_relative_future(self): - - result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", -1) - assert result == "ഒരു മണിക്കൂർ മുമ്പ്" - - -@pytest.mark.usefixtures("lang_locale") -class TestHindiLocale: - def 
test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 घंटे" - assert self.locale._format_timeframe("hour", 0) == "एक घंटा" - - def test_format_relative_now(self): - - result = self.locale._format_relative("अभी", "now", 0) - assert result == "अभी" - - def test_format_relative_past(self): - - result = self.locale._format_relative("एक घंटा", "hour", 1) - assert result == "एक घंटा बाद" - - def test_format_relative_future(self): - - result = self.locale._format_relative("एक घंटा", "hour", -1) - assert result == "एक घंटा पहले" - - -@pytest.mark.usefixtures("lang_locale") -class TestCzechLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 hodiny" - assert self.locale._format_timeframe("hours", 5) == "5 hodin" - assert self.locale._format_timeframe("hour", 0) == "0 hodin" - assert self.locale._format_timeframe("hours", -2) == "2 hodinami" - assert self.locale._format_timeframe("hours", -5) == "5 hodinami" - assert self.locale._format_timeframe("now", 0) == "Teď" - - assert self.locale._format_timeframe("weeks", 2) == "2 týdny" - assert self.locale._format_timeframe("weeks", 5) == "5 týdnů" - assert self.locale._format_timeframe("week", 0) == "0 týdnů" - assert self.locale._format_timeframe("weeks", -2) == "2 týdny" - assert self.locale._format_timeframe("weeks", -5) == "5 týdny" - - def test_format_relative_now(self): - - result = self.locale._format_relative("Teď", "now", 0) - assert result == "Teď" - - def test_format_relative_future(self): - - result = self.locale._format_relative("hodinu", "hour", 1) - assert result == "Za hodinu" - - def test_format_relative_past(self): - - result = self.locale._format_relative("hodinou", "hour", -1) - assert result == "Před hodinou" - - -@pytest.mark.usefixtures("lang_locale") -class TestSlovakLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("seconds", -5) == "5 sekundami" - assert 
self.locale._format_timeframe("seconds", -2) == "2 sekundami" - assert self.locale._format_timeframe("second", -1) == "sekundou" - assert self.locale._format_timeframe("second", 0) == "0 sekúnd" - assert self.locale._format_timeframe("second", 1) == "sekundu" - assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" - assert self.locale._format_timeframe("seconds", 5) == "5 sekúnd" - - assert self.locale._format_timeframe("minutes", -5) == "5 minútami" - assert self.locale._format_timeframe("minutes", -2) == "2 minútami" - assert self.locale._format_timeframe("minute", -1) == "minútou" - assert self.locale._format_timeframe("minute", 0) == "0 minút" - assert self.locale._format_timeframe("minute", 1) == "minútu" - assert self.locale._format_timeframe("minutes", 2) == "2 minúty" - assert self.locale._format_timeframe("minutes", 5) == "5 minút" - - assert self.locale._format_timeframe("hours", -5) == "5 hodinami" - assert self.locale._format_timeframe("hours", -2) == "2 hodinami" - assert self.locale._format_timeframe("hour", -1) == "hodinou" - assert self.locale._format_timeframe("hour", 0) == "0 hodín" - assert self.locale._format_timeframe("hour", 1) == "hodinu" - assert self.locale._format_timeframe("hours", 2) == "2 hodiny" - assert self.locale._format_timeframe("hours", 5) == "5 hodín" - - assert self.locale._format_timeframe("days", -5) == "5 dňami" - assert self.locale._format_timeframe("days", -2) == "2 dňami" - assert self.locale._format_timeframe("day", -1) == "dňom" - assert self.locale._format_timeframe("day", 0) == "0 dní" - assert self.locale._format_timeframe("day", 1) == "deň" - assert self.locale._format_timeframe("days", 2) == "2 dni" - assert self.locale._format_timeframe("days", 5) == "5 dní" - - assert self.locale._format_timeframe("weeks", -5) == "5 týždňami" - assert self.locale._format_timeframe("weeks", -2) == "2 týždňami" - assert self.locale._format_timeframe("week", -1) == "týždňom" - assert self.locale._format_timeframe("week", 
0) == "0 týždňov" - assert self.locale._format_timeframe("week", 1) == "týždeň" - assert self.locale._format_timeframe("weeks", 2) == "2 týždne" - assert self.locale._format_timeframe("weeks", 5) == "5 týždňov" - - assert self.locale._format_timeframe("months", -5) == "5 mesiacmi" - assert self.locale._format_timeframe("months", -2) == "2 mesiacmi" - assert self.locale._format_timeframe("month", -1) == "mesiacom" - assert self.locale._format_timeframe("month", 0) == "0 mesiacov" - assert self.locale._format_timeframe("month", 1) == "mesiac" - assert self.locale._format_timeframe("months", 2) == "2 mesiace" - assert self.locale._format_timeframe("months", 5) == "5 mesiacov" - - assert self.locale._format_timeframe("years", -5) == "5 rokmi" - assert self.locale._format_timeframe("years", -2) == "2 rokmi" - assert self.locale._format_timeframe("year", -1) == "rokom" - assert self.locale._format_timeframe("year", 0) == "0 rokov" - assert self.locale._format_timeframe("year", 1) == "rok" - assert self.locale._format_timeframe("years", 2) == "2 roky" - assert self.locale._format_timeframe("years", 5) == "5 rokov" - - assert self.locale._format_timeframe("now", 0) == "Teraz" - - def test_format_relative_now(self): - - result = self.locale._format_relative("Teraz", "now", 0) - assert result == "Teraz" - - def test_format_relative_future(self): - - result = self.locale._format_relative("hodinu", "hour", 1) - assert result == "O hodinu" - - def test_format_relative_past(self): - - result = self.locale._format_relative("hodinou", "hour", -1) - assert result == "Pred hodinou" - - -@pytest.mark.usefixtures("lang_locale") -class TestBulgarianLocale: - def test_plurals2(self): - assert self.locale._format_timeframe("hours", 0) == "0 часа" - assert self.locale._format_timeframe("hours", 1) == "1 час" - assert self.locale._format_timeframe("hours", 2) == "2 часа" - assert self.locale._format_timeframe("hours", 4) == "4 часа" - assert self.locale._format_timeframe("hours", 5) == "5 
часа" - assert self.locale._format_timeframe("hours", 21) == "21 час" - assert self.locale._format_timeframe("hours", 22) == "22 часа" - assert self.locale._format_timeframe("hours", 25) == "25 часа" - - # feminine grammatical gender should be tested separately - assert self.locale._format_timeframe("minutes", 0) == "0 минути" - assert self.locale._format_timeframe("minutes", 1) == "1 минута" - assert self.locale._format_timeframe("minutes", 2) == "2 минути" - assert self.locale._format_timeframe("minutes", 4) == "4 минути" - assert self.locale._format_timeframe("minutes", 5) == "5 минути" - assert self.locale._format_timeframe("minutes", 21) == "21 минута" - assert self.locale._format_timeframe("minutes", 22) == "22 минути" - assert self.locale._format_timeframe("minutes", 25) == "25 минути" - - -@pytest.mark.usefixtures("lang_locale") -class TestMacedonianLocale: - def test_singles_mk(self): - assert self.locale._format_timeframe("second", 1) == "една секунда" - assert self.locale._format_timeframe("minute", 1) == "една минута" - assert self.locale._format_timeframe("hour", 1) == "еден саат" - assert self.locale._format_timeframe("day", 1) == "еден ден" - assert self.locale._format_timeframe("week", 1) == "една недела" - assert self.locale._format_timeframe("month", 1) == "еден месец" - assert self.locale._format_timeframe("year", 1) == "една година" - - def test_meridians_mk(self): - assert self.locale.meridian(7, "A") == "претпладне" - assert self.locale.meridian(18, "A") == "попладне" - assert self.locale.meridian(10, "a") == "дп" - assert self.locale.meridian(22, "a") == "пп" - - def test_describe_mk(self): - assert self.locale.describe("second", only_distance=True) == "една секунда" - assert self.locale.describe("second", only_distance=False) == "за една секунда" - assert self.locale.describe("minute", only_distance=True) == "една минута" - assert self.locale.describe("minute", only_distance=False) == "за една минута" - assert self.locale.describe("hour", 
only_distance=True) == "еден саат" - assert self.locale.describe("hour", only_distance=False) == "за еден саат" - assert self.locale.describe("day", only_distance=True) == "еден ден" - assert self.locale.describe("day", only_distance=False) == "за еден ден" - assert self.locale.describe("week", only_distance=True) == "една недела" - assert self.locale.describe("week", only_distance=False) == "за една недела" - assert self.locale.describe("month", only_distance=True) == "еден месец" - assert self.locale.describe("month", only_distance=False) == "за еден месец" - assert self.locale.describe("year", only_distance=True) == "една година" - assert self.locale.describe("year", only_distance=False) == "за една година" - - def test_relative_mk(self): - # time - assert self.locale._format_relative("сега", "now", 0) == "сега" - assert self.locale._format_relative("1 секунда", "seconds", 1) == "за 1 секунда" - assert self.locale._format_relative("1 минута", "minutes", 1) == "за 1 минута" - assert self.locale._format_relative("1 саат", "hours", 1) == "за 1 саат" - assert self.locale._format_relative("1 ден", "days", 1) == "за 1 ден" - assert self.locale._format_relative("1 недела", "weeks", 1) == "за 1 недела" - assert self.locale._format_relative("1 месец", "months", 1) == "за 1 месец" - assert self.locale._format_relative("1 година", "years", 1) == "за 1 година" - assert ( - self.locale._format_relative("1 секунда", "seconds", -1) == "пред 1 секунда" - ) - assert ( - self.locale._format_relative("1 минута", "minutes", -1) == "пред 1 минута" - ) - assert self.locale._format_relative("1 саат", "hours", -1) == "пред 1 саат" - assert self.locale._format_relative("1 ден", "days", -1) == "пред 1 ден" - assert self.locale._format_relative("1 недела", "weeks", -1) == "пред 1 недела" - assert self.locale._format_relative("1 месец", "months", -1) == "пред 1 месец" - assert self.locale._format_relative("1 година", "years", -1) == "пред 1 година" - - def test_plurals_mk(self): - # 
Seconds - assert self.locale._format_timeframe("seconds", 0) == "0 секунди" - assert self.locale._format_timeframe("seconds", 1) == "1 секунда" - assert self.locale._format_timeframe("seconds", 2) == "2 секунди" - assert self.locale._format_timeframe("seconds", 4) == "4 секунди" - assert self.locale._format_timeframe("seconds", 5) == "5 секунди" - assert self.locale._format_timeframe("seconds", 21) == "21 секунда" - assert self.locale._format_timeframe("seconds", 22) == "22 секунди" - assert self.locale._format_timeframe("seconds", 25) == "25 секунди" - - # Minutes - assert self.locale._format_timeframe("minutes", 0) == "0 минути" - assert self.locale._format_timeframe("minutes", 1) == "1 минута" - assert self.locale._format_timeframe("minutes", 2) == "2 минути" - assert self.locale._format_timeframe("minutes", 4) == "4 минути" - assert self.locale._format_timeframe("minutes", 5) == "5 минути" - assert self.locale._format_timeframe("minutes", 21) == "21 минута" - assert self.locale._format_timeframe("minutes", 22) == "22 минути" - assert self.locale._format_timeframe("minutes", 25) == "25 минути" - - # Hours - assert self.locale._format_timeframe("hours", 0) == "0 саати" - assert self.locale._format_timeframe("hours", 1) == "1 саат" - assert self.locale._format_timeframe("hours", 2) == "2 саати" - assert self.locale._format_timeframe("hours", 4) == "4 саати" - assert self.locale._format_timeframe("hours", 5) == "5 саати" - assert self.locale._format_timeframe("hours", 21) == "21 саат" - assert self.locale._format_timeframe("hours", 22) == "22 саати" - assert self.locale._format_timeframe("hours", 25) == "25 саати" - - # Days - assert self.locale._format_timeframe("days", 0) == "0 дена" - assert self.locale._format_timeframe("days", 1) == "1 ден" - assert self.locale._format_timeframe("days", 2) == "2 дена" - assert self.locale._format_timeframe("days", 3) == "3 дена" - assert self.locale._format_timeframe("days", 21) == "21 ден" - - # Weeks - assert 
self.locale._format_timeframe("weeks", 0) == "0 недели" - assert self.locale._format_timeframe("weeks", 1) == "1 недела" - assert self.locale._format_timeframe("weeks", 2) == "2 недели" - assert self.locale._format_timeframe("weeks", 4) == "4 недели" - assert self.locale._format_timeframe("weeks", 5) == "5 недели" - assert self.locale._format_timeframe("weeks", 21) == "21 недела" - assert self.locale._format_timeframe("weeks", 22) == "22 недели" - assert self.locale._format_timeframe("weeks", 25) == "25 недели" - - # Months - assert self.locale._format_timeframe("months", 0) == "0 месеци" - assert self.locale._format_timeframe("months", 1) == "1 месец" - assert self.locale._format_timeframe("months", 2) == "2 месеци" - assert self.locale._format_timeframe("months", 4) == "4 месеци" - assert self.locale._format_timeframe("months", 5) == "5 месеци" - assert self.locale._format_timeframe("months", 21) == "21 месец" - assert self.locale._format_timeframe("months", 22) == "22 месеци" - assert self.locale._format_timeframe("months", 25) == "25 месеци" - - # Years - assert self.locale._format_timeframe("years", 1) == "1 година" - assert self.locale._format_timeframe("years", 2) == "2 години" - assert self.locale._format_timeframe("years", 5) == "5 години" - - def test_multi_describe_mk(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "за 5 години 1 недела 1 саат 6 минути" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "за 0 дена 1 саат 6 минути" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "за 1 саат 6 минути" - assert describe(seconds4000, only_distance=True) == "1 саат 6 минути" - seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "за 1 саат 1 минута" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert describe(seconds300_0hours) == "за 0 
саати 5 минути" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "за 5 минути" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "за 1 минута" - assert describe(seconds60, only_distance=True) == "1 минута" - seconds60 = [("seconds", 1)] - assert describe(seconds60) == "за 1 секунда" - assert describe(seconds60, only_distance=True) == "1 секунда" - - -@pytest.mark.usefixtures("time_2013_01_01") -@pytest.mark.usefixtures("lang_locale") -class TestHebrewLocale: - def test_couple_of_timeframe(self): - assert self.locale._format_timeframe("days", 1) == "יום" - assert self.locale._format_timeframe("days", 2) == "יומיים" - assert self.locale._format_timeframe("days", 3) == "3 ימים" - - assert self.locale._format_timeframe("hours", 1) == "שעה" - assert self.locale._format_timeframe("hours", 2) == "שעתיים" - assert self.locale._format_timeframe("hours", 3) == "3 שעות" - - assert self.locale._format_timeframe("week", 1) == "שבוע" - assert self.locale._format_timeframe("weeks", 2) == "שבועיים" - assert self.locale._format_timeframe("weeks", 3) == "3 שבועות" - - assert self.locale._format_timeframe("months", 1) == "חודש" - assert self.locale._format_timeframe("months", 2) == "חודשיים" - assert self.locale._format_timeframe("months", 4) == "4 חודשים" - - assert self.locale._format_timeframe("years", 1) == "שנה" - assert self.locale._format_timeframe("years", 2) == "שנתיים" - assert self.locale._format_timeframe("years", 5) == "5 שנים" - - def test_describe_multi(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "בעוד 5 שנים, שבוע, שעה ו־6 דקות" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "בעוד 0 ימים, שעה ו־6 דקות" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "בעוד שעה ו־6 דקות" - assert describe(seconds4000, only_distance=True) == "שעה ו־6 דקות" - 
seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "בעוד שעה ודקה" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert describe(seconds300_0hours) == "בעוד 0 שעות ו־5 דקות" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "בעוד 5 דקות" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "בעוד דקה" - assert describe(seconds60, only_distance=True) == "דקה" - - -@pytest.mark.usefixtures("lang_locale") -class TestMarathiLocale: - def test_dateCoreFunctionality(self): - dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) - assert self.locale.month_name(dt.month) == "एप्रिल" - assert self.locale.month_abbreviation(dt.month) == "एप्रि" - assert self.locale.day_name(dt.isoweekday()) == "शनिवार" - assert self.locale.day_abbreviation(dt.isoweekday()) == "शनि" - - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 तास" - assert self.locale._format_timeframe("hour", 0) == "एक तास" - - def test_format_relative_now(self): - result = self.locale._format_relative("सद्य", "now", 0) - assert result == "सद्य" - - def test_format_relative_past(self): - result = self.locale._format_relative("एक तास", "hour", 1) - assert result == "एक तास नंतर" - - def test_format_relative_future(self): - result = self.locale._format_relative("एक तास", "hour", -1) - assert result == "एक तास आधी" - - # Not currently implemented - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1" - - -@pytest.mark.usefixtures("lang_locale") -class TestFinnishLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == ("2 tuntia", "2 tunnin") - assert self.locale._format_timeframe("hour", 0) == ("tunti", "tunnin") - - def test_format_relative_now(self): - result = self.locale._format_relative(["juuri nyt", "juuri nyt"], "now", 0) - assert result == "juuri nyt" - - def test_format_relative_past(self): - result = self.locale._format_relative(["tunti", "tunnin"], "hour", 
1) - assert result == "tunnin kuluttua" - - def test_format_relative_future(self): - result = self.locale._format_relative(["tunti", "tunnin"], "hour", -1) - assert result == "tunti sitten" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1." - - -@pytest.mark.usefixtures("lang_locale") -class TestGermanLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1." - - def test_define(self): - assert self.locale.describe("minute", only_distance=True) == "eine Minute" - assert self.locale.describe("minute", only_distance=False) == "in einer Minute" - assert self.locale.describe("hour", only_distance=True) == "eine Stunde" - assert self.locale.describe("hour", only_distance=False) == "in einer Stunde" - assert self.locale.describe("day", only_distance=True) == "ein Tag" - assert self.locale.describe("day", only_distance=False) == "in einem Tag" - assert self.locale.describe("week", only_distance=True) == "eine Woche" - assert self.locale.describe("week", only_distance=False) == "in einer Woche" - assert self.locale.describe("month", only_distance=True) == "ein Monat" - assert self.locale.describe("month", only_distance=False) == "in einem Monat" - assert self.locale.describe("year", only_distance=True) == "ein Jahr" - assert self.locale.describe("year", only_distance=False) == "in einem Jahr" - - def test_weekday(self): - dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) - assert self.locale.day_name(dt.isoweekday()) == "Samstag" - assert self.locale.day_abbreviation(dt.isoweekday()) == "Sa" - - -@pytest.mark.usefixtures("lang_locale") -class TestHungarianLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 óra" - assert self.locale._format_timeframe("hour", 0) == "egy órával" - assert self.locale._format_timeframe("hours", -2) == "2 órával" - assert self.locale._format_timeframe("now", 0) == "éppen most" - - -@pytest.mark.usefixtures("lang_locale") -class 
TestEsperantoLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 horoj" - assert self.locale._format_timeframe("hour", 0) == "un horo" - assert self.locale._format_timeframe("hours", -2) == "2 horoj" - assert self.locale._format_timeframe("now", 0) == "nun" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1a" - - -@pytest.mark.usefixtures("lang_locale") -class TestThaiLocale: - def test_year_full(self): - assert self.locale.year_full(2015) == "2558" - - def test_year_abbreviation(self): - assert self.locale.year_abbreviation(2015) == "58" - - def test_format_relative_now(self): - result = self.locale._format_relative("ขณะนี้", "now", 0) - assert result == "ขณะนี้" - - def test_format_relative_past(self): - result = self.locale._format_relative("1 ชั่วโมง", "hour", 1) - assert result == "ในอีก 1 ชั่วโมง" - result = self.locale._format_relative("{0} ชั่วโมง", "hours", 2) - assert result == "ในอีก {0} ชั่วโมง" - result = self.locale._format_relative("ไม่กี่วินาที", "seconds", 42) - assert result == "ในอีกไม่กี่วินาที" - - def test_format_relative_future(self): - result = self.locale._format_relative("1 ชั่วโมง", "hour", -1) - assert result == "1 ชั่วโมง ที่ผ่านมา" - - -@pytest.mark.usefixtures("lang_locale") -class TestBengaliLocale: - def test_ordinal_number(self): - assert self.locale._ordinal_number(0) == "0তম" - assert self.locale._ordinal_number(1) == "1ম" - assert self.locale._ordinal_number(3) == "3য়" - assert self.locale._ordinal_number(4) == "4র্থ" - assert self.locale._ordinal_number(5) == "5ম" - assert self.locale._ordinal_number(6) == "6ষ্ঠ" - assert self.locale._ordinal_number(10) == "10ম" - assert self.locale._ordinal_number(11) == "11তম" - assert self.locale._ordinal_number(42) == "42তম" - assert self.locale._ordinal_number(-1) is None - - -@pytest.mark.usefixtures("lang_locale") -class TestRomanianLocale: - def test_timeframes(self): - - assert 
self.locale._format_timeframe("hours", 2) == "2 ore" - assert self.locale._format_timeframe("months", 2) == "2 luni" - - assert self.locale._format_timeframe("days", 2) == "2 zile" - assert self.locale._format_timeframe("years", 2) == "2 ani" - - assert self.locale._format_timeframe("hours", 3) == "3 ore" - assert self.locale._format_timeframe("months", 4) == "4 luni" - assert self.locale._format_timeframe("days", 3) == "3 zile" - assert self.locale._format_timeframe("years", 5) == "5 ani" - - def test_relative_timeframes(self): - assert self.locale._format_relative("acum", "now", 0) == "acum" - assert self.locale._format_relative("o oră", "hour", 1) == "peste o oră" - assert self.locale._format_relative("o oră", "hour", -1) == "o oră în urmă" - assert self.locale._format_relative("un minut", "minute", 1) == "peste un minut" - assert ( - self.locale._format_relative("un minut", "minute", -1) == "un minut în urmă" - ) - assert ( - self.locale._format_relative("câteva secunde", "seconds", -1) - == "câteva secunde în urmă" - ) - assert ( - self.locale._format_relative("câteva secunde", "seconds", 1) - == "peste câteva secunde" - ) - assert self.locale._format_relative("o zi", "day", -1) == "o zi în urmă" - assert self.locale._format_relative("o zi", "day", 1) == "peste o zi" - - -@pytest.mark.usefixtures("lang_locale") -class TestArabicLocale: - def test_timeframes(self): - - # single - assert self.locale._format_timeframe("minute", 1) == "دقيقة" - assert self.locale._format_timeframe("hour", 1) == "ساعة" - assert self.locale._format_timeframe("day", 1) == "يوم" - assert self.locale._format_timeframe("month", 1) == "شهر" - assert self.locale._format_timeframe("year", 1) == "سنة" - - # double - assert self.locale._format_timeframe("minutes", 2) == "دقيقتين" - assert self.locale._format_timeframe("hours", 2) == "ساعتين" - assert self.locale._format_timeframe("days", 2) == "يومين" - assert self.locale._format_timeframe("months", 2) == "شهرين" - assert 
self.locale._format_timeframe("years", 2) == "سنتين" - - # up to ten - assert self.locale._format_timeframe("minutes", 3) == "3 دقائق" - assert self.locale._format_timeframe("hours", 4) == "4 ساعات" - assert self.locale._format_timeframe("days", 5) == "5 أيام" - assert self.locale._format_timeframe("months", 6) == "6 أشهر" - assert self.locale._format_timeframe("years", 10) == "10 سنوات" - - # more than ten - assert self.locale._format_timeframe("minutes", 11) == "11 دقيقة" - assert self.locale._format_timeframe("hours", 19) == "19 ساعة" - assert self.locale._format_timeframe("months", 24) == "24 شهر" - assert self.locale._format_timeframe("days", 50) == "50 يوم" - assert self.locale._format_timeframe("years", 115) == "115 سنة" - - -@pytest.mark.usefixtures("lang_locale") -class TestNepaliLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 3) == "3 घण्टा" - assert self.locale._format_timeframe("hour", 0) == "एक घण्टा" - - def test_format_relative_now(self): - result = self.locale._format_relative("अहिले", "now", 0) - assert result == "अहिले" - - def test_format_relative_future(self): - result = self.locale._format_relative("एक घण्टा", "hour", 1) - assert result == "एक घण्टा पछी" - - def test_format_relative_past(self): - result = self.locale._format_relative("एक घण्टा", "hour", -1) - assert result == "एक घण्टा पहिले" - - -@pytest.mark.usefixtures("lang_locale") -class TestIndonesianLocale: - def test_timeframes(self): - assert self.locale._format_timeframe("hours", 2) == "2 jam" - assert self.locale._format_timeframe("months", 2) == "2 bulan" - - assert self.locale._format_timeframe("days", 2) == "2 hari" - assert self.locale._format_timeframe("years", 2) == "2 tahun" - - assert self.locale._format_timeframe("hours", 3) == "3 jam" - assert self.locale._format_timeframe("months", 4) == "4 bulan" - assert self.locale._format_timeframe("days", 3) == "3 hari" - assert self.locale._format_timeframe("years", 5) == "5 tahun" - - def 
test_format_relative_now(self): - assert self.locale._format_relative("baru saja", "now", 0) == "baru saja" - - def test_format_relative_past(self): - assert self.locale._format_relative("1 jam", "hour", 1) == "dalam 1 jam" - assert self.locale._format_relative("1 detik", "seconds", 1) == "dalam 1 detik" - - def test_format_relative_future(self): - assert self.locale._format_relative("1 jam", "hour", -1) == "1 jam yang lalu" - - -@pytest.mark.usefixtures("lang_locale") -class TestTagalogLocale: - def test_singles_tl(self): - assert self.locale._format_timeframe("second", 1) == "isang segundo" - assert self.locale._format_timeframe("minute", 1) == "isang minuto" - assert self.locale._format_timeframe("hour", 1) == "isang oras" - assert self.locale._format_timeframe("day", 1) == "isang araw" - assert self.locale._format_timeframe("week", 1) == "isang linggo" - assert self.locale._format_timeframe("month", 1) == "isang buwan" - assert self.locale._format_timeframe("year", 1) == "isang taon" - - def test_meridians_tl(self): - assert self.locale.meridian(7, "A") == "ng umaga" - assert self.locale.meridian(18, "A") == "ng hapon" - assert self.locale.meridian(10, "a") == "nu" - assert self.locale.meridian(22, "a") == "nh" - - def test_describe_tl(self): - assert self.locale.describe("second", only_distance=True) == "isang segundo" - assert ( - self.locale.describe("second", only_distance=False) - == "isang segundo mula ngayon" - ) - assert self.locale.describe("minute", only_distance=True) == "isang minuto" - assert ( - self.locale.describe("minute", only_distance=False) - == "isang minuto mula ngayon" - ) - assert self.locale.describe("hour", only_distance=True) == "isang oras" - assert ( - self.locale.describe("hour", only_distance=False) - == "isang oras mula ngayon" - ) - assert self.locale.describe("day", only_distance=True) == "isang araw" - assert ( - self.locale.describe("day", only_distance=False) == "isang araw mula ngayon" - ) - assert 
self.locale.describe("week", only_distance=True) == "isang linggo" - assert ( - self.locale.describe("week", only_distance=False) - == "isang linggo mula ngayon" - ) - assert self.locale.describe("month", only_distance=True) == "isang buwan" - assert ( - self.locale.describe("month", only_distance=False) - == "isang buwan mula ngayon" - ) - assert self.locale.describe("year", only_distance=True) == "isang taon" - assert ( - self.locale.describe("year", only_distance=False) - == "isang taon mula ngayon" - ) - - def test_relative_tl(self): - # time - assert self.locale._format_relative("ngayon", "now", 0) == "ngayon" - assert ( - self.locale._format_relative("1 segundo", "seconds", 1) - == "1 segundo mula ngayon" - ) - assert ( - self.locale._format_relative("1 minuto", "minutes", 1) - == "1 minuto mula ngayon" - ) - assert ( - self.locale._format_relative("1 oras", "hours", 1) == "1 oras mula ngayon" - ) - assert self.locale._format_relative("1 araw", "days", 1) == "1 araw mula ngayon" - assert ( - self.locale._format_relative("1 linggo", "weeks", 1) - == "1 linggo mula ngayon" - ) - assert ( - self.locale._format_relative("1 buwan", "months", 1) - == "1 buwan mula ngayon" - ) - assert ( - self.locale._format_relative("1 taon", "years", 1) == "1 taon mula ngayon" - ) - assert ( - self.locale._format_relative("1 segundo", "seconds", -1) - == "nakaraang 1 segundo" - ) - assert ( - self.locale._format_relative("1 minuto", "minutes", -1) - == "nakaraang 1 minuto" - ) - assert self.locale._format_relative("1 oras", "hours", -1) == "nakaraang 1 oras" - assert self.locale._format_relative("1 araw", "days", -1) == "nakaraang 1 araw" - assert ( - self.locale._format_relative("1 linggo", "weeks", -1) - == "nakaraang 1 linggo" - ) - assert ( - self.locale._format_relative("1 buwan", "months", -1) == "nakaraang 1 buwan" - ) - assert self.locale._format_relative("1 taon", "years", -1) == "nakaraang 1 taon" - - def test_plurals_tl(self): - # Seconds - assert 
self.locale._format_timeframe("seconds", 0) == "0 segundo" - assert self.locale._format_timeframe("seconds", 1) == "1 segundo" - assert self.locale._format_timeframe("seconds", 2) == "2 segundo" - assert self.locale._format_timeframe("seconds", 4) == "4 segundo" - assert self.locale._format_timeframe("seconds", 5) == "5 segundo" - assert self.locale._format_timeframe("seconds", 21) == "21 segundo" - assert self.locale._format_timeframe("seconds", 22) == "22 segundo" - assert self.locale._format_timeframe("seconds", 25) == "25 segundo" - - # Minutes - assert self.locale._format_timeframe("minutes", 0) == "0 minuto" - assert self.locale._format_timeframe("minutes", 1) == "1 minuto" - assert self.locale._format_timeframe("minutes", 2) == "2 minuto" - assert self.locale._format_timeframe("minutes", 4) == "4 minuto" - assert self.locale._format_timeframe("minutes", 5) == "5 minuto" - assert self.locale._format_timeframe("minutes", 21) == "21 minuto" - assert self.locale._format_timeframe("minutes", 22) == "22 minuto" - assert self.locale._format_timeframe("minutes", 25) == "25 minuto" - - # Hours - assert self.locale._format_timeframe("hours", 0) == "0 oras" - assert self.locale._format_timeframe("hours", 1) == "1 oras" - assert self.locale._format_timeframe("hours", 2) == "2 oras" - assert self.locale._format_timeframe("hours", 4) == "4 oras" - assert self.locale._format_timeframe("hours", 5) == "5 oras" - assert self.locale._format_timeframe("hours", 21) == "21 oras" - assert self.locale._format_timeframe("hours", 22) == "22 oras" - assert self.locale._format_timeframe("hours", 25) == "25 oras" - - # Days - assert self.locale._format_timeframe("days", 0) == "0 araw" - assert self.locale._format_timeframe("days", 1) == "1 araw" - assert self.locale._format_timeframe("days", 2) == "2 araw" - assert self.locale._format_timeframe("days", 3) == "3 araw" - assert self.locale._format_timeframe("days", 21) == "21 araw" - - # Weeks - assert 
self.locale._format_timeframe("weeks", 0) == "0 linggo" - assert self.locale._format_timeframe("weeks", 1) == "1 linggo" - assert self.locale._format_timeframe("weeks", 2) == "2 linggo" - assert self.locale._format_timeframe("weeks", 4) == "4 linggo" - assert self.locale._format_timeframe("weeks", 5) == "5 linggo" - assert self.locale._format_timeframe("weeks", 21) == "21 linggo" - assert self.locale._format_timeframe("weeks", 22) == "22 linggo" - assert self.locale._format_timeframe("weeks", 25) == "25 linggo" - - # Months - assert self.locale._format_timeframe("months", 0) == "0 buwan" - assert self.locale._format_timeframe("months", 1) == "1 buwan" - assert self.locale._format_timeframe("months", 2) == "2 buwan" - assert self.locale._format_timeframe("months", 4) == "4 buwan" - assert self.locale._format_timeframe("months", 5) == "5 buwan" - assert self.locale._format_timeframe("months", 21) == "21 buwan" - assert self.locale._format_timeframe("months", 22) == "22 buwan" - assert self.locale._format_timeframe("months", 25) == "25 buwan" - - # Years - assert self.locale._format_timeframe("years", 1) == "1 taon" - assert self.locale._format_timeframe("years", 2) == "2 taon" - assert self.locale._format_timeframe("years", 5) == "5 taon" - - def test_multi_describe_tl(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "5 taon 1 linggo 1 oras 6 minuto mula ngayon" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "0 araw 1 oras 6 minuto mula ngayon" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "1 oras 6 minuto mula ngayon" - assert describe(seconds4000, only_distance=True) == "1 oras 6 minuto" - seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "1 oras 1 minuto mula ngayon" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert 
describe(seconds300_0hours) == "0 oras 5 minuto mula ngayon" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "5 minuto mula ngayon" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "1 minuto mula ngayon" - assert describe(seconds60, only_distance=True) == "1 minuto" - seconds60 = [("seconds", 1)] - assert describe(seconds60) == "1 segundo mula ngayon" - assert describe(seconds60, only_distance=True) == "1 segundo" - - def test_ordinal_number_tl(self): - assert self.locale.ordinal_number(0) == "ika-0" - assert self.locale.ordinal_number(1) == "ika-1" - assert self.locale.ordinal_number(2) == "ika-2" - assert self.locale.ordinal_number(3) == "ika-3" - assert self.locale.ordinal_number(10) == "ika-10" - assert self.locale.ordinal_number(23) == "ika-23" - assert self.locale.ordinal_number(100) == "ika-100" - assert self.locale.ordinal_number(103) == "ika-103" - assert self.locale.ordinal_number(114) == "ika-114" - - -@pytest.mark.usefixtures("lang_locale") -class TestEstonianLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "just nüüd" - assert self.locale._format_timeframe("second", 1) == "ühe sekundi" - assert self.locale._format_timeframe("seconds", 3) == "3 sekundi" - assert self.locale._format_timeframe("seconds", 30) == "30 sekundi" - assert self.locale._format_timeframe("minute", 1) == "ühe minuti" - assert self.locale._format_timeframe("minutes", 4) == "4 minuti" - assert self.locale._format_timeframe("minutes", 40) == "40 minuti" - assert self.locale._format_timeframe("hour", 1) == "tunni aja" - assert self.locale._format_timeframe("hours", 5) == "5 tunni" - assert self.locale._format_timeframe("hours", 23) == "23 tunni" - assert self.locale._format_timeframe("day", 1) == "ühe päeva" - assert self.locale._format_timeframe("days", 6) == "6 päeva" - assert self.locale._format_timeframe("days", 12) == "12 päeva" - assert self.locale._format_timeframe("month", 1) == "ühe kuu" - assert 
self.locale._format_timeframe("months", 7) == "7 kuu" - assert self.locale._format_timeframe("months", 11) == "11 kuu" - assert self.locale._format_timeframe("year", 1) == "ühe aasta" - assert self.locale._format_timeframe("years", 8) == "8 aasta" - assert self.locale._format_timeframe("years", 12) == "12 aasta" - - assert self.locale._format_timeframe("now", 0) == "just nüüd" - assert self.locale._format_timeframe("second", -1) == "üks sekund" - assert self.locale._format_timeframe("seconds", -9) == "9 sekundit" - assert self.locale._format_timeframe("seconds", -12) == "12 sekundit" - assert self.locale._format_timeframe("minute", -1) == "üks minut" - assert self.locale._format_timeframe("minutes", -2) == "2 minutit" - assert self.locale._format_timeframe("minutes", -10) == "10 minutit" - assert self.locale._format_timeframe("hour", -1) == "tund aega" - assert self.locale._format_timeframe("hours", -3) == "3 tundi" - assert self.locale._format_timeframe("hours", -11) == "11 tundi" - assert self.locale._format_timeframe("day", -1) == "üks päev" - assert self.locale._format_timeframe("days", -2) == "2 päeva" - assert self.locale._format_timeframe("days", -12) == "12 päeva" - assert self.locale._format_timeframe("month", -1) == "üks kuu" - assert self.locale._format_timeframe("months", -3) == "3 kuud" - assert self.locale._format_timeframe("months", -13) == "13 kuud" - assert self.locale._format_timeframe("year", -1) == "üks aasta" - assert self.locale._format_timeframe("years", -4) == "4 aastat" - assert self.locale._format_timeframe("years", -14) == "14 aastat" - - -@pytest.mark.usefixtures("lang_locale") -class TestPortugueseLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "agora" - assert self.locale._format_timeframe("second", 1) == "um segundo" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "um minuto" - assert 
self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "uma hora" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "um dia" - assert self.locale._format_timeframe("days", 12) == "12 dias" - assert self.locale._format_timeframe("month", 1) == "um mês" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "um ano" - assert self.locale._format_timeframe("years", 12) == "12 anos" - - -@pytest.mark.usefixtures("lang_locale") -class TestBrazilianPortugueseLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "agora" - assert self.locale._format_timeframe("second", 1) == "um segundo" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "um minuto" - assert self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "uma hora" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "um dia" - assert self.locale._format_timeframe("days", 12) == "12 dias" - assert self.locale._format_timeframe("month", 1) == "um mês" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "um ano" - assert self.locale._format_timeframe("years", 12) == "12 anos" - assert self.locale._format_relative("uma hora", "hour", -1) == "faz uma hora" - - -@pytest.mark.usefixtures("lang_locale") -class TestHongKongLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "剛才" - assert self.locale._format_timeframe("second", 1) == "1秒" - assert self.locale._format_timeframe("seconds", 30) == "30秒" - assert self.locale._format_timeframe("minute", 1) == "1分鐘" - assert 
self.locale._format_timeframe("minutes", 40) == "40分鐘" - assert self.locale._format_timeframe("hour", 1) == "1小時" - assert self.locale._format_timeframe("hours", 23) == "23小時" - assert self.locale._format_timeframe("day", 1) == "1天" - assert self.locale._format_timeframe("days", 12) == "12天" - assert self.locale._format_timeframe("week", 1) == "1星期" - assert self.locale._format_timeframe("weeks", 38) == "38星期" - assert self.locale._format_timeframe("month", 1) == "1個月" - assert self.locale._format_timeframe("months", 11) == "11個月" - assert self.locale._format_timeframe("year", 1) == "1年" - assert self.locale._format_timeframe("years", 12) == "12年" - - -@pytest.mark.usefixtures("lang_locale") -class TestChineseTWLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "剛才" - assert self.locale._format_timeframe("second", 1) == "1秒" - assert self.locale._format_timeframe("seconds", 30) == "30秒" - assert self.locale._format_timeframe("minute", 1) == "1分鐘" - assert self.locale._format_timeframe("minutes", 40) == "40分鐘" - assert self.locale._format_timeframe("hour", 1) == "1小時" - assert self.locale._format_timeframe("hours", 23) == "23小時" - assert self.locale._format_timeframe("day", 1) == "1天" - assert self.locale._format_timeframe("days", 12) == "12天" - assert self.locale._format_timeframe("week", 1) == "1週" - assert self.locale._format_timeframe("weeks", 38) == "38週" - assert self.locale._format_timeframe("month", 1) == "1個月" - assert self.locale._format_timeframe("months", 11) == "11個月" - assert self.locale._format_timeframe("year", 1) == "1年" - assert self.locale._format_timeframe("years", 12) == "12年" - - -@pytest.mark.usefixtures("lang_locale") -class TestSwahiliLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "sasa hivi" - assert self.locale._format_timeframe("second", 1) == "sekunde" - assert self.locale._format_timeframe("seconds", 3) == "sekunde 3" - assert 
self.locale._format_timeframe("seconds", 30) == "sekunde 30" - assert self.locale._format_timeframe("minute", 1) == "dakika moja" - assert self.locale._format_timeframe("minutes", 4) == "dakika 4" - assert self.locale._format_timeframe("minutes", 40) == "dakika 40" - assert self.locale._format_timeframe("hour", 1) == "saa moja" - assert self.locale._format_timeframe("hours", 5) == "saa 5" - assert self.locale._format_timeframe("hours", 23) == "saa 23" - assert self.locale._format_timeframe("day", 1) == "siku moja" - assert self.locale._format_timeframe("days", 6) == "siku 6" - assert self.locale._format_timeframe("days", 12) == "siku 12" - assert self.locale._format_timeframe("month", 1) == "mwezi moja" - assert self.locale._format_timeframe("months", 7) == "miezi 7" - assert self.locale._format_timeframe("week", 1) == "wiki moja" - assert self.locale._format_timeframe("weeks", 2) == "wiki 2" - assert self.locale._format_timeframe("months", 11) == "miezi 11" - assert self.locale._format_timeframe("year", 1) == "mwaka moja" - assert self.locale._format_timeframe("years", 8) == "miaka 8" - assert self.locale._format_timeframe("years", 12) == "miaka 12" - - def test_format_relative_now(self): - result = self.locale._format_relative("sasa hivi", "now", 0) - assert result == "sasa hivi" - - def test_format_relative_past(self): - result = self.locale._format_relative("saa moja", "hour", 1) - assert result == "muda wa saa moja" - - def test_format_relative_future(self): - result = self.locale._format_relative("saa moja", "hour", -1) - assert result == "saa moja iliyopita" - - -@pytest.mark.usefixtures("lang_locale") -class TestKoreanLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "지금" - assert self.locale._format_timeframe("second", 1) == "1초" - assert self.locale._format_timeframe("seconds", 2) == "2초" - assert self.locale._format_timeframe("minute", 1) == "1분" - assert self.locale._format_timeframe("minutes", 2) == "2분" - 
assert self.locale._format_timeframe("hour", 1) == "한시간" - assert self.locale._format_timeframe("hours", 2) == "2시간" - assert self.locale._format_timeframe("day", 1) == "하루" - assert self.locale._format_timeframe("days", 2) == "2일" - assert self.locale._format_timeframe("week", 1) == "1주" - assert self.locale._format_timeframe("weeks", 2) == "2주" - assert self.locale._format_timeframe("month", 1) == "한달" - assert self.locale._format_timeframe("months", 2) == "2개월" - assert self.locale._format_timeframe("year", 1) == "1년" - assert self.locale._format_timeframe("years", 2) == "2년" - - def test_format_relative(self): - assert self.locale._format_relative("지금", "now", 0) == "지금" - - assert self.locale._format_relative("1초", "second", 1) == "1초 후" - assert self.locale._format_relative("2초", "seconds", 2) == "2초 후" - assert self.locale._format_relative("1분", "minute", 1) == "1분 후" - assert self.locale._format_relative("2분", "minutes", 2) == "2분 후" - assert self.locale._format_relative("한시간", "hour", 1) == "한시간 후" - assert self.locale._format_relative("2시간", "hours", 2) == "2시간 후" - assert self.locale._format_relative("하루", "day", 1) == "내일" - assert self.locale._format_relative("2일", "days", 2) == "모레" - assert self.locale._format_relative("3일", "days", 3) == "글피" - assert self.locale._format_relative("4일", "days", 4) == "그글피" - assert self.locale._format_relative("5일", "days", 5) == "5일 후" - assert self.locale._format_relative("1주", "week", 1) == "1주 후" - assert self.locale._format_relative("2주", "weeks", 2) == "2주 후" - assert self.locale._format_relative("한달", "month", 1) == "한달 후" - assert self.locale._format_relative("2개월", "months", 2) == "2개월 후" - assert self.locale._format_relative("1년", "year", 1) == "내년" - assert self.locale._format_relative("2년", "years", 2) == "내후년" - assert self.locale._format_relative("3년", "years", 3) == "3년 후" - - assert self.locale._format_relative("1초", "second", -1) == "1초 전" - assert self.locale._format_relative("2초", "seconds", -2) == 
"2초 전" - assert self.locale._format_relative("1분", "minute", -1) == "1분 전" - assert self.locale._format_relative("2분", "minutes", -2) == "2분 전" - assert self.locale._format_relative("한시간", "hour", -1) == "한시간 전" - assert self.locale._format_relative("2시간", "hours", -2) == "2시간 전" - assert self.locale._format_relative("하루", "day", -1) == "어제" - assert self.locale._format_relative("2일", "days", -2) == "그제" - assert self.locale._format_relative("3일", "days", -3) == "그끄제" - assert self.locale._format_relative("4일", "days", -4) == "4일 전" - assert self.locale._format_relative("1주", "week", -1) == "1주 전" - assert self.locale._format_relative("2주", "weeks", -2) == "2주 전" - assert self.locale._format_relative("한달", "month", -1) == "한달 전" - assert self.locale._format_relative("2개월", "months", -2) == "2개월 전" - assert self.locale._format_relative("1년", "year", -1) == "작년" - assert self.locale._format_relative("2년", "years", -2) == "제작년" - assert self.locale._format_relative("3년", "years", -3) == "3년 전" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(0) == "0번째" - assert self.locale.ordinal_number(1) == "첫번째" - assert self.locale.ordinal_number(2) == "두번째" - assert self.locale.ordinal_number(3) == "세번째" - assert self.locale.ordinal_number(4) == "네번째" - assert self.locale.ordinal_number(5) == "다섯번째" - assert self.locale.ordinal_number(6) == "여섯번째" - assert self.locale.ordinal_number(7) == "일곱번째" - assert self.locale.ordinal_number(8) == "여덟번째" - assert self.locale.ordinal_number(9) == "아홉번째" - assert self.locale.ordinal_number(10) == "열번째" - assert self.locale.ordinal_number(11) == "11번째" - assert self.locale.ordinal_number(12) == "12번째" - assert self.locale.ordinal_number(100) == "100번째" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py deleted file mode 100644 index 9fb4e68f3cf..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py +++ /dev/null @@ -1,1657 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import calendar -import os -import time -from datetime import datetime - -import pytest -from dateutil import tz - -import arrow -from arrow import formatter, parser -from arrow.constants import MAX_TIMESTAMP_US -from arrow.parser import DateTimeParser, ParserError, ParserMatchError - -from .utils import make_full_tz_list - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParser: - def test_parse_multiformat(self, mocker): - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=parser.ParserError, - ) - - with pytest.raises(parser.ParserError): - self.parser._parse_multiformat("str", ["fmt_a"]) - - mock_datetime = mocker.Mock() - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_b", - return_value=mock_datetime, - ) - - result = self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - assert result == mock_datetime - - def test_parse_multiformat_all_fail(self, mocker): - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=parser.ParserError, - ) - - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_b", - side_effect=parser.ParserError, - ) - - with pytest.raises(parser.ParserError): - self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - - def test_parse_multiformat_unself_expected_fail(self, mocker): - class UnselfExpectedError(Exception): - pass - - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=UnselfExpectedError, - ) - - with pytest.raises(UnselfExpectedError): - self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - - def test_parse_token_nonsense(self): - parts = {} - self.parser._parse_token("NONSENSE", "1900", parts) - assert parts == {} - - def 
test_parse_token_invalid_meridians(self): - parts = {} - self.parser._parse_token("A", "a..m", parts) - assert parts == {} - self.parser._parse_token("a", "p..m", parts) - assert parts == {} - - def test_parser_no_caching(self, mocker): - - mocked_parser = mocker.patch( - "arrow.parser.DateTimeParser._generate_pattern_re", fmt="fmt_a" - ) - self.parser = parser.DateTimeParser(cache_size=0) - for _ in range(100): - self.parser._generate_pattern_re("fmt_a") - assert mocked_parser.call_count == 100 - - def test_parser_1_line_caching(self, mocker): - mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") - self.parser = parser.DateTimeParser(cache_size=1) - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 1 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 3 - assert mocked_parser.call_args_list[2] == mocker.call(fmt="fmt_a") - - def test_parser_multiple_line_caching(self, mocker): - mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") - self.parser = parser.DateTimeParser(cache_size=2) - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 1 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - # fmt_a and fmt_b are in the cache, so no new calls should be made - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - 
assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - def test_YY_and_YYYY_format_list(self): - - assert self.parser.parse("15/01/19", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( - 2019, 1, 15 - ) - - # Regression test for issue #580 - assert self.parser.parse("15/01/2019", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( - 2019, 1, 15 - ) - - assert ( - self.parser.parse( - "15/01/2019T04:05:06.789120Z", - ["D/M/YYThh:mm:ss.SZ", "D/M/YYYYThh:mm:ss.SZ"], - ) - == datetime(2019, 1, 15, 4, 5, 6, 789120, tzinfo=tz.tzutc()) - ) - - # regression test for issue #447 - def test_timestamp_format_list(self): - # should not match on the "X" token - assert ( - self.parser.parse( - "15 Jul 2000", - ["MM/DD/YYYY", "YYYY-MM-DD", "X", "DD-MMMM-YYYY", "D MMM YYYY"], - ) - == datetime(2000, 7, 15) - ) - - with pytest.raises(ParserError): - self.parser.parse("15 Jul", "X") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserParse: - def test_parse_list(self, mocker): - - mocker.patch( - "arrow.parser.DateTimeParser._parse_multiformat", - string="str", - formats=["fmt_a", "fmt_b"], - return_value="result", - ) - - result = self.parser.parse("str", ["fmt_a", "fmt_b"]) - assert result == "result" - - def test_parse_unrecognized_token(self, mocker): - - mocker.patch.dict("arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP") - del arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP["YYYY"] - - # need to make another local parser to apply patch changes - _parser = parser.DateTimeParser() - with pytest.raises(parser.ParserError): - _parser.parse("2013-01-01", "YYYY-MM-DD") - - def test_parse_parse_no_match(self): - - with pytest.raises(ParserError): - self.parser.parse("01-01", "YYYY-MM-DD") - - def test_parse_separators(self): - - with pytest.raises(ParserError): - self.parser.parse("1403549231", "YYYY-MM-DD") - - def test_parse_numbers(self): - - self.expected = 
datetime(2012, 1, 1, 12, 5, 10) - assert ( - self.parser.parse("2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss") - == self.expected - ) - - def test_parse_year_two_digit(self): - - self.expected = datetime(1979, 1, 1, 12, 5, 10) - assert ( - self.parser.parse("79-01-01 12:05:10", "YY-MM-DD HH:mm:ss") == self.expected - ) - - def test_parse_timestamp(self): - - tz_utc = tz.tzutc() - int_timestamp = int(time.time()) - self.expected = datetime.fromtimestamp(int_timestamp, tz=tz_utc) - assert self.parser.parse("{:d}".format(int_timestamp), "X") == self.expected - - float_timestamp = time.time() - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert self.parser.parse("{:f}".format(float_timestamp), "X") == self.expected - - # test handling of ns timestamp (arrow will round to 6 digits regardless) - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}123".format(float_timestamp), "X") == self.expected - ) - - # test ps timestamp (arrow will round to 6 digits regardless) - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}123456".format(float_timestamp), "X") - == self.expected - ) - - # NOTE: negative timestamps cannot be handled by datetime on Window - # Must use timedelta to handle them. 
ref: https://stackoverflow.com/questions/36179914 - if os.name != "nt": - # regression test for issue #662 - negative_int_timestamp = -int_timestamp - self.expected = datetime.fromtimestamp(negative_int_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:d}".format(negative_int_timestamp), "X") - == self.expected - ) - - negative_float_timestamp = -float_timestamp - self.expected = datetime.fromtimestamp(negative_float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}".format(negative_float_timestamp), "X") - == self.expected - ) - - # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will - # break cases like "15 Jul 2000" and a format list (see issue #447) - with pytest.raises(ParserError): - natural_lang_string = "Meet me at {} at the restaurant.".format( - float_timestamp - ) - self.parser.parse(natural_lang_string, "X") - - with pytest.raises(ParserError): - self.parser.parse("1565982019.", "X") - - with pytest.raises(ParserError): - self.parser.parse(".1565982019", "X") - - def test_parse_expanded_timestamp(self): - # test expanded timestamps that include milliseconds - # and microseconds as multiples rather than decimals - # requested in issue #357 - - tz_utc = tz.tzutc() - timestamp = 1569982581.413132 - timestamp_milli = int(round(timestamp * 1000)) - timestamp_micro = int(round(timestamp * 1000000)) - - # "x" token should parse integer timestamps below MAX_TIMESTAMP normally - self.expected = datetime.fromtimestamp(int(timestamp), tz=tz_utc) - assert self.parser.parse("{:d}".format(int(timestamp)), "x") == self.expected - - self.expected = datetime.fromtimestamp(round(timestamp, 3), tz=tz_utc) - assert self.parser.parse("{:d}".format(timestamp_milli), "x") == self.expected - - self.expected = datetime.fromtimestamp(timestamp, tz=tz_utc) - assert self.parser.parse("{:d}".format(timestamp_micro), "x") == self.expected - - # anything above max µs timestamp should fail - with 
pytest.raises(ValueError): - self.parser.parse("{:d}".format(int(MAX_TIMESTAMP_US) + 1), "x") - - # floats are not allowed with the "x" token - with pytest.raises(ParserMatchError): - self.parser.parse("{:f}".format(timestamp), "x") - - def test_parse_names(self): - - self.expected = datetime(2012, 1, 1) - - assert self.parser.parse("January 1, 2012", "MMMM D, YYYY") == self.expected - assert self.parser.parse("Jan 1, 2012", "MMM D, YYYY") == self.expected - - def test_parse_pm(self): - - self.expected = datetime(1, 1, 1, 13, 0, 0) - assert self.parser.parse("1 pm", "H a") == self.expected - assert self.parser.parse("1 pm", "h a") == self.expected - - self.expected = datetime(1, 1, 1, 1, 0, 0) - assert self.parser.parse("1 am", "H A") == self.expected - assert self.parser.parse("1 am", "h A") == self.expected - - self.expected = datetime(1, 1, 1, 0, 0, 0) - assert self.parser.parse("12 am", "H A") == self.expected - assert self.parser.parse("12 am", "h A") == self.expected - - self.expected = datetime(1, 1, 1, 12, 0, 0) - assert self.parser.parse("12 pm", "H A") == self.expected - assert self.parser.parse("12 pm", "h A") == self.expected - - def test_parse_tz_hours_only(self): - - self.expected = datetime(2025, 10, 17, 5, 30, 10, tzinfo=tz.tzoffset(None, 0)) - parsed = self.parser.parse("2025-10-17 05:30:10+00", "YYYY-MM-DD HH:mm:ssZ") - assert parsed == self.expected - - def test_parse_tz_zz(self): - - self.expected = datetime(2013, 1, 1, tzinfo=tz.tzoffset(None, -7 * 3600)) - assert self.parser.parse("2013-01-01 -07:00", "YYYY-MM-DD ZZ") == self.expected - - @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) - def test_parse_tz_name_zzz(self, full_tz_name): - - self.expected = datetime(2013, 1, 1, tzinfo=tz.gettz(full_tz_name)) - assert ( - self.parser.parse("2013-01-01 {}".format(full_tz_name), "YYYY-MM-DD ZZZ") - == self.expected - ) - - # note that offsets are not timezones - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 
12:30:45.9+1000", "YYYY-MM-DDZZZ") - - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 12:30:45.9+10:00", "YYYY-MM-DDZZZ") - - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 12:30:45.9-10", "YYYY-MM-DDZZZ") - - def test_parse_subsecond(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) - assert ( - self.parser.parse("2013-01-01 12:30:45.9", "YYYY-MM-DD HH:mm:ss.S") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) - assert ( - self.parser.parse("2013-01-01 12:30:45.98", "YYYY-MM-DD HH:mm:ss.SS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) - assert ( - self.parser.parse("2013-01-01 12:30:45.987", "YYYY-MM-DD HH:mm:ss.SSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) - assert ( - self.parser.parse("2013-01-01 12:30:45.9876", "YYYY-MM-DD HH:mm:ss.SSSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) - assert ( - self.parser.parse("2013-01-01 12:30:45.98765", "YYYY-MM-DD HH:mm:ss.SSSSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert ( - self.parser.parse( - "2013-01-01 12:30:45.987654", "YYYY-MM-DD HH:mm:ss.SSSSSS" - ) - == self.expected - ) - - def test_parse_subsecond_rounding(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - datetime_format = "YYYY-MM-DD HH:mm:ss.S" - - # round up - string = "2013-01-01 12:30:45.9876539" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # round down - string = "2013-01-01 12:30:45.98765432" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # round half-up - string = "2013-01-01 12:30:45.987653521" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == 
self.expected - - # round half-down - string = "2013-01-01 12:30:45.9876545210" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # overflow (zero out the subseconds and increment the seconds) - # regression tests for issue #636 - def test_parse_subsecond_rounding_overflow(self): - datetime_format = "YYYY-MM-DD HH:mm:ss.S" - - self.expected = datetime(2013, 1, 1, 12, 30, 46) - string = "2013-01-01 12:30:45.9999995" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - self.expected = datetime(2013, 1, 1, 12, 31, 0) - string = "2013-01-01 12:30:59.9999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - self.expected = datetime(2013, 1, 2, 0, 0, 0) - string = "2013-01-01 23:59:59.9999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # 6 digits should remain unrounded - self.expected = datetime(2013, 1, 1, 12, 30, 45, 999999) - string = "2013-01-01 12:30:45.999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # Regression tests for issue #560 - def test_parse_long_year(self): - with pytest.raises(ParserError): - self.parser.parse("09 January 123456789101112", "DD MMMM YYYY") - - with pytest.raises(ParserError): - self.parser.parse("123456789101112 09 January", "YYYY DD MMMM") - - with pytest.raises(ParserError): - self.parser.parse("68096653015/01/19", "YY/M/DD") - - def test_parse_with_extra_words_at_start_and_end_invalid(self): - input_format_pairs = [ - ("blah2016", "YYYY"), - ("blah2016blah", "YYYY"), - ("2016blah", "YYYY"), - ("2016-05blah", "YYYY-MM"), - ("2016-05-16blah", "YYYY-MM-DD"), - ("2016-05-16T04:05:06.789120blah", "YYYY-MM-DDThh:mm:ss.S"), - 
("2016-05-16T04:05:06.789120ZblahZ", "YYYY-MM-DDThh:mm:ss.SZ"), - ("2016-05-16T04:05:06.789120Zblah", "YYYY-MM-DDThh:mm:ss.SZ"), - ("2016-05-16T04:05:06.789120blahZ", "YYYY-MM-DDThh:mm:ss.SZ"), - ] - - for pair in input_format_pairs: - with pytest.raises(ParserError): - self.parser.parse(pair[0], pair[1]) - - def test_parse_with_extra_words_at_start_and_end_valid(self): - # Spaces surrounding the parsable date are ok because we - # allow the parsing of natural language input. Additionally, a single - # character of specific punctuation before or after the date is okay. - # See docs for full list of valid punctuation. - - assert self.parser.parse("blah 2016 blah", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("blah 2016", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("2016 blah", "YYYY") == datetime(2016, 1, 1) - - # test one additional space along with space divider - assert self.parser.parse( - "blah 2016-05-16 04:05:06.789120", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - "2016-05-16 04:05:06.789120 blah", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - # test one additional space along with T divider - assert self.parser.parse( - "blah 2016-05-16T04:05:06.789120", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - "2016-05-16T04:05:06.789120 blah", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert ( - self.parser.parse( - "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", - "YYYY-MM-DDThh:mm:ss.S", - ) - == datetime(2016, 5, 16, 4, 5, 6, 789120) - ) - - assert ( - self.parser.parse( - "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", - "YYYY-MM-DD hh:mm:ss.S", - ) - == datetime(2016, 5, 16, 4, 5, 6, 789120) - ) - - # regression test for issue #701 - # tests cases of a partial match surrounded by punctuation - # for the list of valid punctuation, see 
documentation - def test_parse_with_punctuation_fences(self): - assert self.parser.parse( - "Meet me at my house on Halloween (2019-31-10)", "YYYY-DD-MM" - ) == datetime(2019, 10, 31) - - assert self.parser.parse( - "Monday, 9. September 2019, 16:15-20:00", "dddd, D. MMMM YYYY" - ) == datetime(2019, 9, 9) - - assert self.parser.parse("A date is 11.11.2011.", "DD.MM.YYYY") == datetime( - 2011, 11, 11 - ) - - with pytest.raises(ParserMatchError): - self.parser.parse("11.11.2011.1 is not a valid date.", "DD.MM.YYYY") - - with pytest.raises(ParserMatchError): - self.parser.parse( - "This date has too many punctuation marks following it (11.11.2011).", - "DD.MM.YYYY", - ) - - def test_parse_with_leading_and_trailing_whitespace(self): - assert self.parser.parse(" 2016", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("2016 ", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse(" 2016 ", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse( - " 2016-05-16 04:05:06.789120 ", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - " 2016-05-16T04:05:06.789120 ", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - def test_parse_YYYY_DDDD(self): - assert self.parser.parse("1998-136", "YYYY-DDDD") == datetime(1998, 5, 16) - - assert self.parser.parse("1998-006", "YYYY-DDDD") == datetime(1998, 1, 6) - - with pytest.raises(ParserError): - self.parser.parse("1998-456", "YYYY-DDDD") - - def test_parse_YYYY_DDD(self): - assert self.parser.parse("1998-6", "YYYY-DDD") == datetime(1998, 1, 6) - - assert self.parser.parse("1998-136", "YYYY-DDD") == datetime(1998, 5, 16) - - with pytest.raises(ParserError): - self.parser.parse("1998-756", "YYYY-DDD") - - # month cannot be passed with DDD and DDDD tokens - def test_parse_YYYY_MM_DDDD(self): - with pytest.raises(ParserError): - self.parser.parse("2015-01-009", "YYYY-MM-DDDD") - - # year is required with the DDD and DDDD tokens - def 
test_parse_DDD_only(self): - with pytest.raises(ParserError): - self.parser.parse("5", "DDD") - - def test_parse_DDDD_only(self): - with pytest.raises(ParserError): - self.parser.parse("145", "DDDD") - - def test_parse_ddd_and_dddd(self): - fr_parser = parser.DateTimeParser("fr") - - # Day of week should be ignored when a day is passed - # 2019-10-17 is a Thursday, so we know day of week - # is ignored if the same date is outputted - expected = datetime(2019, 10, 17) - assert self.parser.parse("Tue 2019-10-17", "ddd YYYY-MM-DD") == expected - assert fr_parser.parse("mar 2019-10-17", "ddd YYYY-MM-DD") == expected - assert self.parser.parse("Tuesday 2019-10-17", "dddd YYYY-MM-DD") == expected - assert fr_parser.parse("mardi 2019-10-17", "dddd YYYY-MM-DD") == expected - - # Get first Tuesday after epoch - expected = datetime(1970, 1, 6) - assert self.parser.parse("Tue", "ddd") == expected - assert fr_parser.parse("mar", "ddd") == expected - assert self.parser.parse("Tuesday", "dddd") == expected - assert fr_parser.parse("mardi", "dddd") == expected - - # Get first Tuesday in 2020 - expected = datetime(2020, 1, 7) - assert self.parser.parse("Tue 2020", "ddd YYYY") == expected - assert fr_parser.parse("mar 2020", "ddd YYYY") == expected - assert self.parser.parse("Tuesday 2020", "dddd YYYY") == expected - assert fr_parser.parse("mardi 2020", "dddd YYYY") == expected - - # Get first Tuesday in February 2020 - expected = datetime(2020, 2, 4) - assert self.parser.parse("Tue 02 2020", "ddd MM YYYY") == expected - assert fr_parser.parse("mar 02 2020", "ddd MM YYYY") == expected - assert self.parser.parse("Tuesday 02 2020", "dddd MM YYYY") == expected - assert fr_parser.parse("mardi 02 2020", "dddd MM YYYY") == expected - - # Get first Tuesday in February after epoch - expected = datetime(1970, 2, 3) - assert self.parser.parse("Tue 02", "ddd MM") == expected - assert fr_parser.parse("mar 02", "ddd MM") == expected - assert self.parser.parse("Tuesday 02", "dddd MM") == 
expected - assert fr_parser.parse("mardi 02", "dddd MM") == expected - - # Times remain intact - expected = datetime(2020, 2, 4, 10, 25, 54, 123456, tz.tzoffset(None, -3600)) - assert ( - self.parser.parse( - "Tue 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - fr_parser.parse( - "mar 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - self.parser.parse( - "Tuesday 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - fr_parser.parse( - "mardi 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - - def test_parse_ddd_and_dddd_ignore_case(self): - # Regression test for issue #851 - expected = datetime(2019, 6, 24) - assert ( - self.parser.parse("MONDAY, June 24, 2019", "dddd, MMMM DD, YYYY") - == expected - ) - - def test_parse_ddd_and_dddd_then_format(self): - # Regression test for issue #446 - arw_formatter = formatter.DateTimeFormatter() - assert arw_formatter.format(self.parser.parse("Mon", "ddd"), "ddd") == "Mon" - assert ( - arw_formatter.format(self.parser.parse("Monday", "dddd"), "dddd") - == "Monday" - ) - assert arw_formatter.format(self.parser.parse("Tue", "ddd"), "ddd") == "Tue" - assert ( - arw_formatter.format(self.parser.parse("Tuesday", "dddd"), "dddd") - == "Tuesday" - ) - assert arw_formatter.format(self.parser.parse("Wed", "ddd"), "ddd") == "Wed" - assert ( - arw_formatter.format(self.parser.parse("Wednesday", "dddd"), "dddd") - == "Wednesday" - ) - assert arw_formatter.format(self.parser.parse("Thu", "ddd"), "ddd") == "Thu" - assert ( - arw_formatter.format(self.parser.parse("Thursday", "dddd"), "dddd") - == "Thursday" - ) - assert arw_formatter.format(self.parser.parse("Fri", "ddd"), "ddd") == "Fri" - assert ( - arw_formatter.format(self.parser.parse("Friday", "dddd"), "dddd") - == "Friday" - ) - assert arw_formatter.format(self.parser.parse("Sat", "ddd"), "ddd") == "Sat" - assert ( - 
arw_formatter.format(self.parser.parse("Saturday", "dddd"), "dddd") - == "Saturday" - ) - assert arw_formatter.format(self.parser.parse("Sun", "ddd"), "ddd") == "Sun" - assert ( - arw_formatter.format(self.parser.parse("Sunday", "dddd"), "dddd") - == "Sunday" - ) - - def test_parse_HH_24(self): - assert self.parser.parse( - "2019-10-30T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2019, 10, 31, 0, 0, 0, 0) - assert self.parser.parse("2019-10-30T24:00", "YYYY-MM-DDTHH:mm") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse("2019-10-30T24", "YYYY-MM-DDTHH") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse( - "2019-10-30T24:00:00.0", "YYYY-MM-DDTHH:mm:ss.S" - ) == datetime(2019, 10, 31, 0, 0, 0, 0) - assert self.parser.parse( - "2019-10-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2019, 11, 1, 0, 0, 0, 0) - assert self.parser.parse( - "2019-12-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2020, 1, 1, 0, 0, 0, 0) - assert self.parser.parse( - "2019-12-31T23:59:59.9999999", "YYYY-MM-DDTHH:mm:ss.S" - ) == datetime(2020, 1, 1, 0, 0, 0, 0) - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:01:00", "YYYY-MM-DDTHH:mm:ss") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:01", "YYYY-MM-DDTHH:mm:ss") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:00.1", "YYYY-MM-DDTHH:mm:ss.S") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:00.999999", "YYYY-MM-DDTHH:mm:ss.S") - - def test_parse_W(self): - - assert self.parser.parse("2011-W05-4", "W") == datetime(2011, 2, 3) - assert self.parser.parse("2011W054", "W") == datetime(2011, 2, 3) - assert self.parser.parse("2011-W05", "W") == datetime(2011, 1, 31) - assert self.parser.parse("2011W05", "W") == datetime(2011, 1, 31) - assert self.parser.parse("2011-W05-4T14:17:01", "WTHH:mm:ss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - assert self.parser.parse("2011W054T14:17:01", 
"WTHH:mm:ss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - assert self.parser.parse("2011-W05T14:17:01", "WTHH:mm:ss") == datetime( - 2011, 1, 31, 14, 17, 1 - ) - assert self.parser.parse("2011W05T141701", "WTHHmmss") == datetime( - 2011, 1, 31, 14, 17, 1 - ) - assert self.parser.parse("2011W054T141701", "WTHHmmss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - bad_formats = [ - "201W22", - "1995-W1-4", - "2001-W34-90", - "2001--W34", - "2011-W03--3", - "thstrdjtrsrd676776r65", - "2002-W66-1T14:17:01", - "2002-W23-03T14:17:01", - ] - - for fmt in bad_formats: - with pytest.raises(ParserError): - self.parser.parse(fmt, "W") - - def test_parse_normalize_whitespace(self): - assert self.parser.parse( - "Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True - ) == datetime(2005, 6, 1, 13, 33) - - with pytest.raises(ParserError): - self.parser.parse("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA") - - assert ( - self.parser.parse( - "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", - "YYYY-MM-DD T HH:mm:ss S", - normalize_whitespace=True, - ) - == datetime(2013, 5, 5, 12, 30, 45, 123456) - ) - - with pytest.raises(ParserError): - self.parser.parse( - "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", - "YYYY-MM-DD T HH:mm:ss S", - ) - - assert self.parser.parse( - " \n Jun 1\t 2005\n ", "MMM D YYYY", normalize_whitespace=True - ) == datetime(2005, 6, 1) - - with pytest.raises(ParserError): - self.parser.parse(" \n Jun 1\t 2005\n ", "MMM D YYYY") - - -@pytest.mark.usefixtures("dt_parser_regex") -class TestDateTimeParserRegex: - def test_format_year(self): - - assert self.format_regex.findall("YYYY-YY") == ["YYYY", "YY"] - - def test_format_month(self): - - assert self.format_regex.findall("MMMM-MMM-MM-M") == ["MMMM", "MMM", "MM", "M"] - - def test_format_day(self): - - assert self.format_regex.findall("DDDD-DDD-DD-D") == ["DDDD", "DDD", "DD", "D"] - - def test_format_hour(self): - - assert self.format_regex.findall("HH-H-hh-h") == ["HH", "H", "hh", "h"] - - def 
test_format_minute(self): - - assert self.format_regex.findall("mm-m") == ["mm", "m"] - - def test_format_second(self): - - assert self.format_regex.findall("ss-s") == ["ss", "s"] - - def test_format_subsecond(self): - - assert self.format_regex.findall("SSSSSS-SSSSS-SSSS-SSS-SS-S") == [ - "SSSSSS", - "SSSSS", - "SSSS", - "SSS", - "SS", - "S", - ] - - def test_format_tz(self): - - assert self.format_regex.findall("ZZZ-ZZ-Z") == ["ZZZ", "ZZ", "Z"] - - def test_format_am_pm(self): - - assert self.format_regex.findall("A-a") == ["A", "a"] - - def test_format_timestamp(self): - - assert self.format_regex.findall("X") == ["X"] - - def test_format_timestamp_milli(self): - - assert self.format_regex.findall("x") == ["x"] - - def test_escape(self): - - escape_regex = parser.DateTimeParser._ESCAPE_RE - - assert escape_regex.findall("2018-03-09 8 [h] 40 [hello]") == ["[h]", "[hello]"] - - def test_month_names(self): - p = parser.DateTimeParser("en_us") - - text = "_".join(calendar.month_name[1:]) - - result = p._input_re_map["MMMM"].findall(text) - - assert result == calendar.month_name[1:] - - def test_month_abbreviations(self): - p = parser.DateTimeParser("en_us") - - text = "_".join(calendar.month_abbr[1:]) - - result = p._input_re_map["MMM"].findall(text) - - assert result == calendar.month_abbr[1:] - - def test_digits(self): - - assert parser.DateTimeParser._ONE_OR_TWO_DIGIT_RE.findall("4-56") == ["4", "56"] - assert parser.DateTimeParser._ONE_OR_TWO_OR_THREE_DIGIT_RE.findall( - "4-56-789" - ) == ["4", "56", "789"] - assert parser.DateTimeParser._ONE_OR_MORE_DIGIT_RE.findall( - "4-56-789-1234-12345" - ) == ["4", "56", "789", "1234", "12345"] - assert parser.DateTimeParser._TWO_DIGIT_RE.findall("12-3-45") == ["12", "45"] - assert parser.DateTimeParser._THREE_DIGIT_RE.findall("123-4-56") == ["123"] - assert parser.DateTimeParser._FOUR_DIGIT_RE.findall("1234-56") == ["1234"] - - def test_tz(self): - tz_z_re = parser.DateTimeParser._TZ_Z_RE - assert tz_z_re.findall("-0700") 
== [("-", "07", "00")] - assert tz_z_re.findall("+07") == [("+", "07", "")] - assert tz_z_re.search("15/01/2019T04:05:06.789120Z") is not None - assert tz_z_re.search("15/01/2019T04:05:06.789120") is None - - tz_zz_re = parser.DateTimeParser._TZ_ZZ_RE - assert tz_zz_re.findall("-07:00") == [("-", "07", "00")] - assert tz_zz_re.findall("+07") == [("+", "07", "")] - assert tz_zz_re.search("15/01/2019T04:05:06.789120Z") is not None - assert tz_zz_re.search("15/01/2019T04:05:06.789120") is None - - tz_name_re = parser.DateTimeParser._TZ_NAME_RE - assert tz_name_re.findall("Europe/Warsaw") == ["Europe/Warsaw"] - assert tz_name_re.findall("GMT") == ["GMT"] - - def test_timestamp(self): - timestamp_re = parser.DateTimeParser._TIMESTAMP_RE - assert timestamp_re.findall("1565707550.452729") == ["1565707550.452729"] - assert timestamp_re.findall("-1565707550.452729") == ["-1565707550.452729"] - assert timestamp_re.findall("-1565707550") == ["-1565707550"] - assert timestamp_re.findall("1565707550") == ["1565707550"] - assert timestamp_re.findall("1565707550.") == [] - assert timestamp_re.findall(".1565707550") == [] - - def test_timestamp_milli(self): - timestamp_expanded_re = parser.DateTimeParser._TIMESTAMP_EXPANDED_RE - assert timestamp_expanded_re.findall("-1565707550") == ["-1565707550"] - assert timestamp_expanded_re.findall("1565707550") == ["1565707550"] - assert timestamp_expanded_re.findall("1565707550.452729") == [] - assert timestamp_expanded_re.findall("1565707550.") == [] - assert timestamp_expanded_re.findall(".1565707550") == [] - - def test_time(self): - time_re = parser.DateTimeParser._TIME_RE - time_seperators = [":", ""] - - for sep in time_seperators: - assert time_re.findall("12") == [("12", "", "", "", "")] - assert time_re.findall("12{sep}35".format(sep=sep)) == [ - ("12", "35", "", "", "") - ] - assert time_re.findall("12{sep}35{sep}46".format(sep=sep)) == [ - ("12", "35", "46", "", "") - ] - assert 
time_re.findall("12{sep}35{sep}46.952313".format(sep=sep)) == [ - ("12", "35", "46", ".", "952313") - ] - assert time_re.findall("12{sep}35{sep}46,952313".format(sep=sep)) == [ - ("12", "35", "46", ",", "952313") - ] - - assert time_re.findall("12:") == [] - assert time_re.findall("12:35:46.") == [] - assert time_re.findall("12:35:46,") == [] - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserISO: - def test_YYYY(self): - - assert self.parser.parse_iso("2013") == datetime(2013, 1, 1) - - def test_YYYY_DDDD(self): - assert self.parser.parse_iso("1998-136") == datetime(1998, 5, 16) - - assert self.parser.parse_iso("1998-006") == datetime(1998, 1, 6) - - with pytest.raises(ParserError): - self.parser.parse_iso("1998-456") - - # 2016 is a leap year, so Feb 29 exists (leap day) - assert self.parser.parse_iso("2016-059") == datetime(2016, 2, 28) - assert self.parser.parse_iso("2016-060") == datetime(2016, 2, 29) - assert self.parser.parse_iso("2016-061") == datetime(2016, 3, 1) - - # 2017 is not a leap year, so Feb 29 does not exist - assert self.parser.parse_iso("2017-059") == datetime(2017, 2, 28) - assert self.parser.parse_iso("2017-060") == datetime(2017, 3, 1) - assert self.parser.parse_iso("2017-061") == datetime(2017, 3, 2) - - # Since 2016 is a leap year, the 366th day falls in the same year - assert self.parser.parse_iso("2016-366") == datetime(2016, 12, 31) - - # Since 2017 is not a leap year, the 366th day falls in the next year - assert self.parser.parse_iso("2017-366") == datetime(2018, 1, 1) - - def test_YYYY_DDDD_HH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-036 04:05:06+01:00") == datetime( - 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-036 04:05:06Z") == datetime( - 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc() - ) - - def test_YYYY_MM_DDDD(self): - with pytest.raises(ParserError): - self.parser.parse_iso("2014-05-125") - - def test_YYYY_MM(self): - - for separator in 
DateTimeParser.SEPARATORS: - assert self.parser.parse_iso(separator.join(("2013", "02"))) == datetime( - 2013, 2, 1 - ) - - def test_YYYY_MM_DD(self): - - for separator in DateTimeParser.SEPARATORS: - assert self.parser.parse_iso( - separator.join(("2013", "02", "03")) - ) == datetime(2013, 2, 3) - - def test_YYYY_MM_DDTHH_mmZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05+01:00") == datetime( - 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm(self): - - assert self.parser.parse_iso("2013-02-03T04:05") == datetime(2013, 2, 3, 4, 5) - - def test_YYYY_MM_DDTHH(self): - - assert self.parser.parse_iso("2013-02-03T04") == datetime(2013, 2, 3, 4) - - def test_YYYY_MM_DDTHHZ(self): - - assert self.parser.parse_iso("2013-02-03T04+01:00") == datetime( - 2013, 2, 3, 4, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm_ss(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06") == datetime( - 2013, 2, 3, 4, 5, 6 - ) - - def test_YYYY_MM_DD_HH_mmZ(self): - - assert self.parser.parse_iso("2013-02-03 04:05+01:00") == datetime( - 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DD_HH_mm(self): - - assert self.parser.parse_iso("2013-02-03 04:05") == datetime(2013, 2, 3, 4, 5) - - def test_YYYY_MM_DD_HH(self): - - assert self.parser.parse_iso("2013-02-03 04") == datetime(2013, 2, 3, 4) - - def test_invalid_time(self): - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03 044") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03 04:05:06.") - - def test_YYYY_MM_DD_HH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-02-03 04:05:06+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - 
) - - def test_YYYY_MM_DD_HH_mm_ss(self): - - assert self.parser.parse_iso("2013-02-03 04:05:06") == datetime( - 2013, 2, 3, 4, 5, 6 - ) - - def test_YYYY_MM_DDTHH_mm_ss_S(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06.7") == datetime( - 2013, 2, 3, 4, 5, 6, 700000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78") == datetime( - 2013, 2, 3, 4, 5, 6, 780000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.789") == datetime( - 2013, 2, 3, 4, 5, 6, 789000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.7891") == datetime( - 2013, 2, 3, 4, 5, 6, 789100 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78912") == datetime( - 2013, 2, 3, 4, 5, 6, 789120 - ) - - # ISO 8601:2004(E), ISO, 2004-12-01, 4.2.2.4 ... the decimal fraction - # shall be divided from the integer part by the decimal sign specified - # in ISO 31-0, i.e. the comma [,] or full stop [.]. Of these, the comma - # is the preferred sign. - assert self.parser.parse_iso("2013-02-03T04:05:06,789123678") == datetime( - 2013, 2, 3, 4, 5, 6, 789124 - ) - - # there is no limit on the number of decimal places - assert self.parser.parse_iso("2013-02-03T04:05:06.789123678") == datetime( - 2013, 2, 3, 4, 5, 6, 789124 - ) - - def test_YYYY_MM_DDTHH_mm_ss_SZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06.7+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 700000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 780000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.789+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.7891+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789100, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78912+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzoffset(None, 
3600) - ) - - assert self.parser.parse_iso("2013-02-03 04:05:06.78912Z") == datetime( - 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzutc() - ) - - def test_W(self): - - assert self.parser.parse_iso("2011-W05-4") == datetime(2011, 2, 3) - - assert self.parser.parse_iso("2011-W05-4T14:17:01") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - assert self.parser.parse_iso("2011W054") == datetime(2011, 2, 3) - - assert self.parser.parse_iso("2011W054T141701") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - def test_invalid_Z(self): - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912zz") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912Zz") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912ZZ") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912+Z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912-Z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912 Z") - - def test_parse_subsecond(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) - assert self.parser.parse_iso("2013-01-01 12:30:45.9") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) - assert self.parser.parse_iso("2013-01-01 12:30:45.98") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) - assert self.parser.parse_iso("2013-01-01 12:30:45.987") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) - assert self.parser.parse_iso("2013-01-01 12:30:45.9876") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) - assert self.parser.parse_iso("2013-01-01 12:30:45.98765") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert self.parser.parse_iso("2013-01-01 12:30:45.987654") 
== self.expected - - # use comma as subsecond separator - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert self.parser.parse_iso("2013-01-01 12:30:45,987654") == self.expected - - def test_gnu_date(self): - """Regression tests for parsing output from GNU date.""" - # date -Ins - assert self.parser.parse_iso("2016-11-16T09:46:30,895636557-0800") == datetime( - 2016, 11, 16, 9, 46, 30, 895636, tzinfo=tz.tzoffset(None, -3600 * 8) - ) - - # date --rfc-3339=ns - assert self.parser.parse_iso("2016-11-16 09:51:14.682141526-08:00") == datetime( - 2016, 11, 16, 9, 51, 14, 682142, tzinfo=tz.tzoffset(None, -3600 * 8) - ) - - def test_isoformat(self): - - dt = datetime.utcnow() - - assert self.parser.parse_iso(dt.isoformat()) == dt - - def test_parse_iso_normalize_whitespace(self): - assert self.parser.parse_iso( - "2013-036 \t 04:05:06Z", normalize_whitespace=True - ) == datetime(2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc()) - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-036 \t 04:05:06Z") - - assert self.parser.parse_iso( - "\t 2013-05-05T12:30:45.123456 \t \n", normalize_whitespace=True - ) == datetime(2013, 5, 5, 12, 30, 45, 123456) - - with pytest.raises(ParserError): - self.parser.parse_iso("\t 2013-05-05T12:30:45.123456 \t \n") - - def test_parse_iso_with_leading_and_trailing_whitespace(self): - datetime_string = " 2016-11-15T06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = " 2016-11-15T06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = "2016-11-15T06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = "2016-11-15T 06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - # leading whitespace - datetime_string = " 2016-11-15 06:37:19.123456" - with pytest.raises(ParserError): - 
self.parser.parse_iso(datetime_string) - - # trailing whitespace - datetime_string = "2016-11-15 06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = " 2016-11-15 06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - # two dividing spaces - datetime_string = "2016-11-15 06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - def test_parse_iso_with_extra_words_at_start_and_end_invalid(self): - test_inputs = [ - "blah2016", - "blah2016blah", - "blah 2016 blah", - "blah 2016", - "2016 blah", - "blah 2016-05-16 04:05:06.789120", - "2016-05-16 04:05:06.789120 blah", - "blah 2016-05-16T04:05:06.789120", - "2016-05-16T04:05:06.789120 blah", - "2016blah", - "2016-05blah", - "2016-05-16blah", - "2016-05-16T04:05:06.789120blah", - "2016-05-16T04:05:06.789120ZblahZ", - "2016-05-16T04:05:06.789120Zblah", - "2016-05-16T04:05:06.789120blahZ", - "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", - "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", - ] - - for ti in test_inputs: - with pytest.raises(ParserError): - self.parser.parse_iso(ti) - - def test_iso8601_basic_format(self): - assert self.parser.parse_iso("20180517") == datetime(2018, 5, 17) - - assert self.parser.parse_iso("20180517T10") == datetime(2018, 5, 17, 10) - - assert self.parser.parse_iso("20180517T105513.843456") == datetime( - 2018, 5, 17, 10, 55, 13, 843456 - ) - - assert self.parser.parse_iso("20180517T105513Z") == datetime( - 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzutc() - ) - - assert self.parser.parse_iso("20180517T105513.843456-0700") == datetime( - 2018, 5, 17, 10, 55, 13, 843456, tzinfo=tz.tzoffset(None, -25200) - ) - - assert self.parser.parse_iso("20180517T105513-0700") == datetime( - 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzoffset(None, -25200) - ) - - assert self.parser.parse_iso("20180517T105513-07") == datetime( - 2018, 5, 17, 10, 55, 13, 
tzinfo=tz.tzoffset(None, -25200) - ) - - # ordinal in basic format: YYYYDDDD - assert self.parser.parse_iso("1998136") == datetime(1998, 5, 16) - - # timezone requires +- seperator - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T1055130700") - - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T10551307") - - # too many digits in date - with pytest.raises(ParserError): - self.parser.parse_iso("201860517T105513Z") - - # too many digits in time - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T1055213Z") - - def test_midnight_end_day(self): - assert self.parser.parse_iso("2019-10-30T24:00:00") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-30T24:00") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-30T24:00:00.0") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-31T24:00:00") == datetime( - 2019, 11, 1, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-12-31T24:00:00") == datetime( - 2020, 1, 1, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-12-31T23:59:59.9999999") == datetime( - 2020, 1, 1, 0, 0, 0, 0 - ) - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:01:00") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:01") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:00.1") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:00.999999") - - -@pytest.mark.usefixtures("tzinfo_parser") -class TestTzinfoParser: - def test_parse_local(self): - - assert self.parser.parse("local") == tz.tzlocal() - - def test_parse_utc(self): - - assert self.parser.parse("utc") == tz.tzutc() - assert self.parser.parse("UTC") == tz.tzutc() - - def test_parse_iso(self): - - assert self.parser.parse("01:00") == tz.tzoffset(None, 3600) - assert self.parser.parse("11:35") == tz.tzoffset(None, 11 * 3600 + 2100) - assert 
self.parser.parse("+01:00") == tz.tzoffset(None, 3600) - assert self.parser.parse("-01:00") == tz.tzoffset(None, -3600) - - assert self.parser.parse("0100") == tz.tzoffset(None, 3600) - assert self.parser.parse("+0100") == tz.tzoffset(None, 3600) - assert self.parser.parse("-0100") == tz.tzoffset(None, -3600) - - assert self.parser.parse("01") == tz.tzoffset(None, 3600) - assert self.parser.parse("+01") == tz.tzoffset(None, 3600) - assert self.parser.parse("-01") == tz.tzoffset(None, -3600) - - def test_parse_str(self): - - assert self.parser.parse("US/Pacific") == tz.gettz("US/Pacific") - - def test_parse_fails(self): - - with pytest.raises(parser.ParserError): - self.parser.parse("fail") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMonthName: - def test_shortmonth_capitalized(self): - - assert self.parser.parse("2013-Jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_shortmonth_allupper(self): - - assert self.parser.parse("2013-JAN-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_shortmonth_alllower(self): - - assert self.parser.parse("2013-jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_month_capitalized(self): - - assert self.parser.parse("2013-January-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_month_allupper(self): - - assert self.parser.parse("2013-JANUARY-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_month_alllower(self): - - assert self.parser.parse("2013-january-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_localized_month_name(self): - parser_ = parser.DateTimeParser("fr_fr") - - assert parser_.parse("2013-Janvier-01", "YYYY-MMMM-DD") == datetime(2013, 1, 1) - - def test_localized_month_abbreviation(self): - parser_ = parser.DateTimeParser("it_it") - - assert parser_.parse("2013-Gen-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMeridians: - def test_meridians_lowercase(self): - 
assert self.parser.parse("2013-01-01 5am", "YYYY-MM-DD ha") == datetime( - 2013, 1, 1, 5 - ) - - assert self.parser.parse("2013-01-01 5pm", "YYYY-MM-DD ha") == datetime( - 2013, 1, 1, 17 - ) - - def test_meridians_capitalized(self): - assert self.parser.parse("2013-01-01 5AM", "YYYY-MM-DD hA") == datetime( - 2013, 1, 1, 5 - ) - - assert self.parser.parse("2013-01-01 5PM", "YYYY-MM-DD hA") == datetime( - 2013, 1, 1, 17 - ) - - def test_localized_meridians_lowercase(self): - parser_ = parser.DateTimeParser("hu_hu") - assert parser_.parse("2013-01-01 5 de", "YYYY-MM-DD h a") == datetime( - 2013, 1, 1, 5 - ) - - assert parser_.parse("2013-01-01 5 du", "YYYY-MM-DD h a") == datetime( - 2013, 1, 1, 17 - ) - - def test_localized_meridians_capitalized(self): - parser_ = parser.DateTimeParser("hu_hu") - assert parser_.parse("2013-01-01 5 DE", "YYYY-MM-DD h A") == datetime( - 2013, 1, 1, 5 - ) - - assert parser_.parse("2013-01-01 5 DU", "YYYY-MM-DD h A") == datetime( - 2013, 1, 1, 17 - ) - - # regression test for issue #607 - def test_es_meridians(self): - parser_ = parser.DateTimeParser("es") - - assert parser_.parse( - "Junio 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a" - ) == datetime(2019, 6, 30, 20, 0) - - with pytest.raises(ParserError): - parser_.parse( - "Junio 30, 2019 - 08:00 pasdfasdfm", "MMMM DD, YYYY - hh:mm a" - ) - - def test_fr_meridians(self): - parser_ = parser.DateTimeParser("fr") - - # the French locale always uses a 24 hour clock, so it does not support meridians - with pytest.raises(ParserError): - parser_.parse("Janvier 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMonthOrdinalDay: - def test_english(self): - parser_ = parser.DateTimeParser("en_us") - - assert parser_.parse("January 1st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - assert parser_.parse("January 2nd, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 2 - ) - assert parser_.parse("January 3rd, 2013", "MMMM 
Do, YYYY") == datetime( - 2013, 1, 3 - ) - assert parser_.parse("January 4th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 4 - ) - assert parser_.parse("January 11th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 11 - ) - assert parser_.parse("January 12th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 12 - ) - assert parser_.parse("January 13th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 13 - ) - assert parser_.parse("January 21st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 21 - ) - assert parser_.parse("January 31st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 31 - ) - - with pytest.raises(ParserError): - parser_.parse("January 1th, 2013", "MMMM Do, YYYY") - - with pytest.raises(ParserError): - parser_.parse("January 11st, 2013", "MMMM Do, YYYY") - - def test_italian(self): - parser_ = parser.DateTimeParser("it_it") - - assert parser_.parse("Gennaio 1º, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - - def test_spanish(self): - parser_ = parser.DateTimeParser("es_es") - - assert parser_.parse("Enero 1º, 2013", "MMMM Do, YYYY") == datetime(2013, 1, 1) - - def test_french(self): - parser_ = parser.DateTimeParser("fr_fr") - - assert parser_.parse("Janvier 1er, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - - assert parser_.parse("Janvier 2e, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 2 - ) - - assert parser_.parse("Janvier 11e, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 11 - ) - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserSearchDate: - def test_parse_search(self): - - assert self.parser.parse( - "Today is 25 of September of 2003", "DD of MMMM of YYYY" - ) == datetime(2003, 9, 25) - - def test_parse_search_with_numbers(self): - - assert self.parser.parse( - "2000 people met the 2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss" - ) == datetime(2012, 1, 1, 12, 5, 10) - - assert self.parser.parse( - "Call 01-02-03 on 79-01-01 12:05:10", "YY-MM-DD HH:mm:ss" - ) == datetime(1979, 1, 1, 12, 5, 10) - - def 
test_parse_search_with_names(self): - - assert self.parser.parse("June was born in May 1980", "MMMM YYYY") == datetime( - 1980, 5, 1 - ) - - def test_parse_search_locale_with_names(self): - p = parser.DateTimeParser("sv_se") - - assert p.parse("Jan föddes den 31 Dec 1980", "DD MMM YYYY") == datetime( - 1980, 12, 31 - ) - - assert p.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") == datetime( - 1975, 8, 25 - ) - - def test_parse_search_fails(self): - - with pytest.raises(parser.ParserError): - self.parser.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") - - def test_escape(self): - - format = "MMMM D, YYYY [at] h:mma" - assert self.parser.parse( - "Thursday, December 10, 2015 at 5:09pm", format - ) == datetime(2015, 12, 10, 17, 9) - - format = "[MMMM] M D, YYYY [at] h:mma" - assert self.parser.parse("MMMM 12 10, 2015 at 5:09pm", format) == datetime( - 2015, 12, 10, 17, 9 - ) - - format = "[It happened on] MMMM Do [in the year] YYYY [a long time ago]" - assert self.parser.parse( - "It happened on November 25th in the year 1990 a long time ago", format - ) == datetime(1990, 11, 25) - - format = "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]" - assert self.parser.parse( - "It happened on November 25th in the year 1990 a long time ago", format - ) == datetime(1990, 11, 25) - - format = "[I'm][ entirely][ escaped,][ weee!]" - assert self.parser.parse("I'm entirely escaped, weee!", format) == datetime( - 1, 1, 1 - ) - - # Special RegEx characters - format = "MMM DD, YYYY |^${}().*+?<>-& h:mm A" - assert self.parser.parse( - "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM", format - ) == datetime(2017, 12, 31, 2, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py deleted file mode 100644 index e48b4de066c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py +++ /dev/null @@ -1,81 
+0,0 @@ -# -*- coding: utf-8 -*- -import time -from datetime import datetime - -import pytest - -from arrow import util - - -class TestUtil: - def test_next_weekday(self): - # Get first Monday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 0) == datetime(1970, 1, 5) - - # Get first Tuesday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 1) == datetime(1970, 1, 6) - - # Get first Wednesday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 2) == datetime(1970, 1, 7) - - # Get first Thursday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 3) == datetime(1970, 1, 1) - - # Get first Friday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 4) == datetime(1970, 1, 2) - - # Get first Saturday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 5) == datetime(1970, 1, 3) - - # Get first Sunday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 6) == datetime(1970, 1, 4) - - # Weekdays are 0-indexed - with pytest.raises(ValueError): - util.next_weekday(datetime(1970, 1, 1), 7) - - with pytest.raises(ValueError): - util.next_weekday(datetime(1970, 1, 1), -1) - - def test_total_seconds(self): - td = datetime(2019, 1, 1) - datetime(2018, 1, 1) - assert util.total_seconds(td) == td.total_seconds() - - def test_is_timestamp(self): - timestamp_float = time.time() - timestamp_int = int(timestamp_float) - - assert util.is_timestamp(timestamp_int) - assert util.is_timestamp(timestamp_float) - assert util.is_timestamp(str(timestamp_int)) - assert util.is_timestamp(str(timestamp_float)) - - assert not util.is_timestamp(True) - assert not util.is_timestamp(False) - - class InvalidTimestamp: - pass - - assert not util.is_timestamp(InvalidTimestamp()) - - full_datetime = "2019-06-23T13:12:42" - assert not util.is_timestamp(full_datetime) - - def test_normalize_timestamp(self): - timestamp = 1591161115.194556 - millisecond_timestamp = 1591161115194 - microsecond_timestamp = 1591161115194556 - - assert 
util.normalize_timestamp(timestamp) == timestamp - assert util.normalize_timestamp(millisecond_timestamp) == 1591161115.194 - assert util.normalize_timestamp(microsecond_timestamp) == 1591161115.194556 - - with pytest.raises(ValueError): - util.normalize_timestamp(3e17) - - def test_iso_gregorian(self): - with pytest.raises(ValueError): - util.iso_to_gregorian(2013, 0, 5) - - with pytest.raises(ValueError): - util.iso_to_gregorian(2013, 8, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py deleted file mode 100644 index 2a048feb3fe..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -import pytz -from dateutil.zoneinfo import get_zonefile_instance - -from arrow import util - - -def make_full_tz_list(): - dateutil_zones = set(get_zonefile_instance().zones) - pytz_zones = set(pytz.all_timezones) - return dateutil_zones.union(pytz_zones) - - -def assert_datetime_equality(dt1, dt2, within=10): - assert dt1.tzinfo == dt2.tzinfo - assert abs(util.total_seconds(dt1 - dt2)) < within diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini deleted file mode 100644 index 46576b12e35..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini +++ /dev/null @@ -1,53 +0,0 @@ -[tox] -minversion = 3.18.0 -envlist = py{py3,27,35,36,37,38,39},lint,docs -skip_missing_interpreters = true - -[gh-actions] -python = - pypy3: pypy3 - 2.7: py27 - 3.5: py35 - 3.6: py36 - 3.7: py37 - 3.8: py38 - 3.9: py39 - -[testenv] -deps = -rrequirements.txt -allowlist_externals = pytest -commands = pytest - -[testenv:lint] -basepython = python3 -skip_install = true -deps = pre-commit -commands = - pre-commit install - pre-commit run --all-files --show-diff-on-failure 
- -[testenv:docs] -basepython = python3 -skip_install = true -changedir = docs -deps = - doc8 - sphinx - python-dateutil -allowlist_externals = make -commands = - doc8 index.rst ../README.rst --extension .rst --ignore D001 - make html SPHINXOPTS="-W --keep-going" - -[pytest] -addopts = -v --cov-branch --cov=arrow --cov-fail-under=100 --cov-report=term-missing --cov-report=xml -testpaths = tests - -[isort] -line_length = 88 -multi_line_output = 3 -include_trailing_comma = true - -[flake8] -per-file-ignores = arrow/__init__.py:F401 -ignore = E203,E501,W503 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore deleted file mode 100644 index be621609ab9..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore +++ /dev/null @@ -1,42 +0,0 @@ -# General -*.py[cod] - -# Packages -*.egg -*.egg-info -dist -build -.eggs/ -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 -__pycache__ - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox - -# Caches -Thumbs.db - -# Development -.project -.pydevproject -.settings -.idea/ -.history/ -.vscode/ - -# Testing -.cache -test-reports/* -.pytest_cache/* \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python deleted file mode 100644 index 9dc010d8034..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python +++ /dev/null @@ -1,254 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. 
Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations, which became -Zope Corporation. In 2001, the Python Software Foundation (PSF, see -https://www.python.org/psf/) was formed, a non-profit organization -created specifically to own Python-related Intellectual Property. -Zope Corporation was a sponsoring member of the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. 
According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. 
Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. 
By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. 
By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt deleted file mode 100644 index d9a10c0d8e8..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt +++ /dev/null @@ -1,176 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in deleted file mode 100644 index 3216ee548c6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE.txt -include README.rst -recursive-include resource *.py -recursive-include doc *.rst *.conf *.py *.png *.css diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst deleted file mode 100644 index 074a35f97c2..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst +++ /dev/null @@ -1,34 +0,0 @@ -################# -ftrack Python API -################# - -Python API for ftrack. - -.. important:: - - This is the new Python client for the ftrack API. If you are migrating from - the old client then please read the dedicated `migration guide `_. - -************* -Documentation -************* - -Full documentation, including installation and setup guides, can be found at -http://ftrack-python-api.rtd.ftrack.com/en/stable/ - -********************* -Copyright and license -********************* - -Copyright (c) 2014 ftrack - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this work except in compliance with the License. You may obtain a copy of the -License in the LICENSE.txt file, or at: - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed -under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml deleted file mode 100644 index 355f00f7529..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Test configuration for bitbucket pipelines. -options: - max-time: 20 -definitions: - services: - ftrack: - image: - name: ftrackdocker/test-server:latest - username: $DOCKER_HUB_USERNAME - password: $DOCKER_HUB_PASSWORD - email: $DOCKER_HUB_EMAIL -pipelines: - default: - - parallel: - - step: - name: run tests against python 2.7.x - image: python:2.7 - caches: - - pip - services: - - ftrack - script: - - bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' $FTRACK_SERVER)" != "200" ]]; do sleep 1; done' - - python setup.py test \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css deleted file mode 100644 index 3456b0c3c5c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css +++ /dev/null @@ -1,16 +0,0 @@ -@import "css/theme.css"; - -.domain-summary li { - float: left; - min-width: 12em; -} - -.domain-summary ul:before, ul:after { - content: ''; - clear: both; - display:block; -} - -.rst-content table.docutils td:last-child { - white-space: normal; -} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst deleted file mode 100644 index 4e165b01223..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.base -************************ - -.. automodule:: ftrack_api.accessor.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst deleted file mode 100644 index f7d9dddf376..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.disk -************************ - -.. automodule:: ftrack_api.accessor.disk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst deleted file mode 100644 index 0adc23fe2de..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************* -ftrack_api.accessor -******************* - -.. automodule:: ftrack_api.accessor - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst deleted file mode 100644 index 62bd7f41659..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.server -************************ - -.. automodule:: ftrack_api.accessor.server diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst deleted file mode 100644 index 9fd8994eb11..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.attribute -******************** - -.. automodule:: ftrack_api.attribute diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst deleted file mode 100644 index cbf9128a5a6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.cache -**************** - -.. automodule:: ftrack_api.cache diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst deleted file mode 100644 index 607d574cb56..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.collection -********************* - -.. 
automodule:: ftrack_api.collection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst deleted file mode 100644 index 0bc4ce35f10..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************************* -ftrack_api.entity.asset_version -******************************* - -.. automodule:: ftrack_api.entity.asset_version diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst deleted file mode 100644 index f4beedc9a4e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************** -ftrack_api.entity.base -********************** - -.. automodule:: ftrack_api.entity.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst deleted file mode 100644 index c9ce0a0cf11..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.entity.component -*************************** - -.. 
automodule:: ftrack_api.entity.component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst deleted file mode 100644 index 483c16641bd..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -************************* -ftrack_api.entity.factory -************************* - -.. automodule:: ftrack_api.entity.factory diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst deleted file mode 100644 index fce68c0e94f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.entity -***************** - -.. automodule:: ftrack_api.entity - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst deleted file mode 100644 index 9d22a7c378d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.entity.job -********************* - -.. 
automodule:: ftrack_api.entity.job diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst deleted file mode 100644 index 60e006a10c9..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************** -ftrack_api.entity.location -************************** - -.. automodule:: ftrack_api.entity.location diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst deleted file mode 100644 index 3588e48e5b5..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.note -********************** - -.. automodule:: ftrack_api.entity.note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst deleted file mode 100644 index 5777ab0b404..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************************** -ftrack_api.entity.project_schema -******************************** - -.. 
automodule:: ftrack_api.entity.project_schema diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst deleted file mode 100644 index 0014498b9cc..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.user -********************** - -.. automodule:: ftrack_api.entity.user diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst deleted file mode 100644 index 2b0ca8d3ed7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.event.base -********************* - -.. automodule:: ftrack_api.event.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst deleted file mode 100644 index f5827170603..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.expression -*************************** - -.. 
automodule:: ftrack_api.event.expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst deleted file mode 100644 index 36d7a331639..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.event.hub -******************** - -.. automodule:: ftrack_api.event.hub diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst deleted file mode 100644 index 0986e8e2f4f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.event -**************** - -.. automodule:: ftrack_api.event - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst deleted file mode 100644 index 974f3758177..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.subscriber -*************************** - -.. 
automodule:: ftrack_api.event.subscriber diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst deleted file mode 100644 index 94a20e36112..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************************** -ftrack_api.event.subscription -***************************** - -.. automodule:: ftrack_api.event.subscription diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst deleted file mode 100644 index 64c3a699d75..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.exception -******************** - -.. automodule:: ftrack_api.exception diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst deleted file mode 100644 index 9b8154bdc36..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.formatter -******************** - -.. 
automodule:: ftrack_api.formatter diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst deleted file mode 100644 index ea3517ca68e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _api_reference: - -************* -API Reference -************* - -ftrack_api -========== - -.. automodule:: ftrack_api - -.. toctree:: - :maxdepth: 1 - :glob: - - */index - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst deleted file mode 100644 index 8223ee72f2b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.inspection -********************* - -.. automodule:: ftrack_api.inspection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst deleted file mode 100644 index ecb883d3853..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -****************** -ftrack_api.logging -****************** - -.. 
automodule:: ftrack_api.logging diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst deleted file mode 100644 index b2dff9933d7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************** -ftrack_api.operation -******************** - -.. automodule:: ftrack_api.operation diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst deleted file mode 100644 index a4993d94cfb..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.plugin -***************** - -.. automodule:: ftrack_api.plugin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst deleted file mode 100644 index acbd8d237a2..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.query -**************** - -.. 
automodule:: ftrack_api.query diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst deleted file mode 100644 index 09cdad8627b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer.base: - -*********************************************** -ftrack_api.resource_identifier_transformer.base -*********************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst deleted file mode 100644 index 755f052c9df..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer: - -****************************************** -ftrack_api.resource_identifier_transformer -****************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer - -.. 
toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst deleted file mode 100644 index dcce173d1f6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -****************** -ftrack_api.session -****************** - -.. automodule:: ftrack_api.session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst deleted file mode 100644 index 55a1cc75d2c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************* -ftrack_api.structure.base -************************* - -.. automodule:: ftrack_api.structure.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst deleted file mode 100644 index ade2c7ae887..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*********************** -ftrack_api.structure.id -*********************** - -.. 
automodule:: ftrack_api.structure.id diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst deleted file mode 100644 index cbd4545cf71..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.structure -******************** - -.. automodule:: ftrack_api.structure - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst deleted file mode 100644 index 403173e257a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.structure.origin -*************************** - -.. automodule:: ftrack_api.structure.origin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst deleted file mode 100644 index 5c0d88026bf..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -***************************** -ftrack_api.structure.standard -***************************** - -.. 
automodule:: ftrack_api.structure.standard diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst deleted file mode 100644 index 55dc0125a8c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.symbol -***************** - -.. automodule:: ftrack_api.symbol diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst deleted file mode 100644 index bfc5cef4019..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst +++ /dev/null @@ -1,175 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - - -.. _caching: - -******* -Caching -******* - -The API makes use of caching in order to provide more efficient retrieval of -data by reducing the number of calls to the remote server:: - - # First call to retrieve user performs a request to the server. - user = session.get('User', 'some-user-id') - - # A later call in the same session to retrieve the same user just gets - # the existing instance from the cache without a request to the server. - user = session.get('User', 'some-user-id') - -It also seamlessly merges related data together regardless of how it was -retrieved:: - - >>> timelog = user['timelogs'][0] - >>> with session.auto_populating(False): - >>> print timelog['comment'] - NOT_SET - >>> session.query( - ... 'select comment from Timelog where id is "{0}"' - ... .format(timelog['id']) - ... 
).all() - >>> with session.auto_populating(False): - >>> print timelog['comment'] - 'Some comment' - -By default, each :class:`~ftrack_api.session.Session` is configured with a -simple :class:`~ftrack_api.cache.MemoryCache()` and the cache is lost as soon as -the session expires. - -Configuring a session cache -=========================== - -It is possible to configure the cache that a session uses. An example would be a -persistent auto-populated cache that survives between sessions:: - - import os - import ftrack_api.cache - - # Specify where the file based cache should be stored. - cache_path = os.path.join(tempfile.gettempdir(), 'ftrack_session_cache.dbm') - - - # Define a cache maker that returns a file based cache. Note that a - # function is used because the file based cache should use the session's - # encode and decode methods to serialise the entity data to a format that - # can be written to disk (JSON). - def cache_maker(session): - '''Return cache to use for *session*.''' - return ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - # Create the session using the cache maker. - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - There can be a performance penalty when using a more complex cache setup. - For example, serialising data and also writing and reading from disk can be - relatively slow operations. - -Regardless of the cache specified, the session will always construct a -:class:`~ftrack_api.cache.LayeredCache` with a -:class:`~ftrack_api.cache.MemoryCache` at the top level and then your cache at -the second level. This is to ensure consistency of instances returned by the -session. 
- -You can check (or even modify) at any time what cache configuration a session is -using by accessing the `cache` attribute on a -:class:`~ftrack_api.session.Session`:: - - >>> print session.cache - - -Writing a new cache interface -============================= - -If you have a custom cache backend you should be able to integrate it into the -system by writing a cache interface that matches the one defined by -:class:`ftrack_api.cache.Cache`. This typically involves a subclass and -overriding the :meth:`~ftrack_api.cache.Cache.get`, -:meth:`~ftrack_api.cache.Cache.set` and :meth:`~ftrack_api.cache.Cache.remove` -methods. - - -Managing what gets cached -========================= - -The cache system is quite flexible when it comes to controlling what should be -cached. - -Consider you have a layered cache where the bottom layer cache should be -persisted between sessions. In this setup you probably don't want the persisted -cache to hold non-persisted values, such as modified entity values or newly -created entities not yet committed to the server. However, you might want the -top level memory cache to hold onto these values. - -Here is one way to set this up. First define a new proxy cache that is selective -about what it sets:: - - import ftrack_api.inspection - - - class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that won't cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - -Now use this custom cache to wrap the serialised cache in the setup above: - -.. 
code-block:: python - :emphasize-lines: 3, 9 - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - ) - -Now to prevent modified attributes also being persisted, tweak the encode -settings for the file cache: - -.. code-block:: python - :emphasize-lines: 1, 9-12 - - import functools - - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, - entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - ) - -And use the updated cache maker for your session:: - - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - For some type of attributes that are computed, long term caching is not - recommended and such values will not be encoded with the `persisted_only` - strategy. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py deleted file mode 100644 index 11544721555..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py +++ /dev/null @@ -1,102 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''ftrack Python API documentation build configuration file.''' - -import os -import re - -# -- General ------------------------------------------------------------------ - -# Extensions. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.extlinks', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'lowdown' -] - - -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'ftrack Python API' -copyright = u'2014, ftrack' - -# Version -with open( - os.path.join( - os.path.dirname(__file__), '..', 'source', - 'ftrack_api', '_version.py' - ) -) as _version_file: - _version = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - -version = _version -release = _version - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_template'] - -# A list of prefixes to ignore for module listings. -modindex_common_prefix = [ - 'ftrack_api.' -] - -# -- HTML output -------------------------------------------------------------- - -if not os.environ.get('READTHEDOCS', None) == 'True': - # Only import and set the theme if building locally. - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -html_static_path = ['_static'] -html_style = 'ftrack.css' - -# If True, copy source rst files to output for reference. 
-html_copy_source = True - - -# -- Autodoc ------------------------------------------------------------------ - -autodoc_default_flags = ['members', 'undoc-members', 'inherited-members'] -autodoc_member_order = 'bysource' - - -def autodoc_skip(app, what, name, obj, skip, options): - '''Don't skip __init__ method for autodoc.''' - if name == '__init__': - return False - - return skip - - -# -- Intersphinx -------------------------------------------------------------- - -intersphinx_mapping = { - 'python': ('http://docs.python.org/', None), - 'ftrack': ( - 'http://rtd.ftrack.com/docs/ftrack/en/stable/', None - ) -} - - -# -- Todos --------------------------------------------------------------------- - -todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' - - -# -- Setup -------------------------------------------------------------------- - -def setup(app): - app.connect('autodoc-skip-member', autodoc_skip) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf deleted file mode 100644 index 3c927cc1eeb..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf +++ /dev/null @@ -1,2 +0,0 @@ -[html4css1 writer] -field-name-limit:0 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst deleted file mode 100644 index 99019ee44f8..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. 
_environment_variables: - -********************* -Environment variables -********************* - -The following is a consolidated list of environment variables that this API -can reference: - -.. envvar:: FTRACK_SERVER - - The full url of the ftrack server to connect to. For example - "https://mycompany.ftrackapp.com" - -.. envvar:: FTRACK_API_USER - - The username of the ftrack user to act on behalf of when performing actions - in the system. - - .. note:: - - When this environment variable is not set, the API will typically also - check other standard operating system variables that hold the username - of the current logged in user. To do this it uses - :func:`getpass.getuser`. - -.. envvar:: FTRACK_API_KEY - - The API key to use when performing actions in the system. The API key is - used to determine the permissions that a script has in the system. - -.. envvar:: FTRACK_APIKEY - - For backwards compatibility. See :envvar:`FTRACK_API_KEY`. - -.. envvar:: FTRACK_EVENT_PLUGIN_PATH - - Paths to search recursively for plugins to load and use in a session. - Multiple paths can be specified by separating with the value of - :attr:`os.pathsep` (e.g. ':' or ';'). - -.. envvar:: FTRACK_API_SCHEMA_CACHE_PATH - - Path to a directory that will be used for storing and retrieving a cache of - the entity schemas fetched from the server. - -.. envvar:: http_proxy / https_proxy - - If you need to use a proxy to connect to ftrack you can use the - "standard" :envvar:`http_proxy` and :envvar:`https_proxy`. Please note that they - are lowercase. 
- - For example "export https_proxy=http://proxy.mycompany.com:8080" \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst deleted file mode 100644 index 0c44a1b68c9..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst +++ /dev/null @@ -1,137 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _event_list: - -********** -Event list -********** - -The following is a consolidated list of events published directly by this API. - -For some events, a template plugin file is also listed for download -(:guilabel:`Download template plugin`) to help get you started with writing your -own plugin for a particular event. - -.. seealso:: - - * :ref:`handling_events` - * :ref:`ftrack server event list ` - -.. _event_list/ftrack.api.session.construct-entity-type: - -ftrack.api.session.construct-entity-type -======================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to retrieve constructed class for specified schema:: - - Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ) - -Expects returned data to be:: - - A Python class. - -.. seealso:: :ref:`working_with_entities/entity_types`. - -.. _event_list/ftrack.api.session.configure-location: - -ftrack.api.session.configure-location -===================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to allow configuring of location instances:: - - Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ) - -.. seealso:: :ref:`Configuring locations `. - -.. 
_event_list/ftrack.location.component-added: - -ftrack.location.component-added -=============================== - -Published whenever a component is added to a location:: - - Event( - topic='ftrack.location.component-added', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. _event_list/ftrack.location.component-removed: - -ftrack.location.component-removed -================================= - -Published whenever a component is removed from a location:: - - Event( - topic='ftrack.location.component-removed', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. _event_list/ftrack.api.session.ready: - -ftrack.api.session.ready -======================== - -:ref:`Synchronous `. Published after -a :class:`~ftrack_api.session.Session` has been initialized and -is ready to be used:: - - Event( - topic='ftrack.api.session.ready', - data=dict( - session=, - ) - ) - -.. warning:: - - Since the event is synchronous and blocking, avoid doing any unnecessary - work as it will slow down session initialization. - -.. seealso:: - - Also see example usage in :download:`example_plugin_using_session.py - `. - - -.. _event_list/ftrack.api.session.reset: - -ftrack.api.session.reset -======================== - -:ref:`Synchronous `. 
Published after -a :class:`~ftrack_api.session.Session` has been reset and is ready to be used -again:: - - Event( - topic='ftrack.api.session.reset', - data=dict( - session=, - ) - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst deleted file mode 100644 index 985eb9bb442..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst +++ /dev/null @@ -1,82 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/assignments_and_allocations: - -**************************************** -Working with assignments and allocations -**************************************** - -.. currentmodule:: ftrack_api.session - -The API exposes `assignments` and `allocations` relationships on objects in -the project hierarchy. You can use these to retrieve the allocated or assigned -resources, which can be either groups or users. - -Allocations can be used to allocate users or groups to a project team, while -assignments are more explicit and is used to assign users to tasks. Both -assignment and allocations are modelled as `Appointment` objects, with a -`type` attribute indicating the type of the appoinment. 
- -The following example retrieves all users part of the project team:: - - # Retrieve a project - project = session.query('Project').first() - - # Set to hold all users part of the project team - project_team = set() - - # Add all allocated groups and users - for allocation in project['allocations']: - - # Resource may be either a group or a user - resource = allocation['resource'] - - # If the resource is a group, add its members - if isinstance(resource, session.types['Group']): - for membership in resource['memberships']: - user = membership['user'] - project_team.add(user) - - # The resource is a user, add it. - else: - user = resource - project_team.add(user) - -The next example shows how to assign the current user to a task:: - - # Retrieve a task and the current user - task = session.query('Task').first() - current_user = session.query( - u'User where username is {0}'.format(session.api_user) - ).one() - - # Create a new Appointment of type assignment. - session.create('Appointment', { - 'context': task, - 'resource': current_user, - 'type': 'assignment' - }) - - # Finally, persist the new assignment - session.commit() - -To list all users assigned to a task, see the following example:: - - task = session.query('Task').first() - users = session.query( - 'select first_name, last_name from User ' - 'where assignments any (context_id = "{0}")'.format(task['id']) - ) - for user in users: - print user['first_name'], user['last_name'] - -To list the current user's assigned tasks, see the example below:: - - assigned_tasks = session.query( - 'select link from Task ' - 'where assignments any (resource.username = "{0}")'.format(session.api_user) - ) - for task in assigned_tasks: - print u' / '.join(item['name'] for item in task['link']) - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst deleted file mode 
100644 index 6a39bb20d15..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/component: - -*********************** -Working with components -*********************** - -.. currentmodule:: ftrack_api.session - -Components can be created manually or using the provide helper methods on a -:meth:`session ` or existing -:meth:`asset version -`:: - - component = version.create_component('/path/to/file_or_sequence.jpg') - session.commit() - -When a component is created using the helpers it is automatically added to a -location. - -.. seealso:: :ref:`Locations tutorial ` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst deleted file mode 100644 index 033942b4428..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/custom_attribute: - -*********************** -Using custom attributes -*********************** - -.. currentmodule:: ftrack_api.session - -Custom attributes can be written and read from entities using the -``custom_attributes`` property. - -The ``custom_attributes`` property provides a similar interface to a dictionary. 
- -Keys can be printed using the keys method:: - - >>> task['custom_attributes'].keys() - [u'my_text_field'] - -or access keys and values as items:: - - >>> print task['custom_attributes'].items() - [(u'my_text_field', u'some text')] - -Read existing custom attribute values:: - - >>> print task['custom_attributes']['my_text_field'] - 'some text' - -Updating a custom attributes can also be done similar to a dictionary:: - - task['custom_attributes']['my_text_field'] = 'foo' - -To query for tasks with a custom attribute, ``my_text_field``, you can use the -key from the configuration:: - - for task in session.query( - 'Task where custom_attributes any ' - '(key is "my_text_field" and value is "bar")' - ): - print task['name'] - -Limitations -=========== - -Expression attributes ---------------------- - -Expression attributes are not yet supported and the reported value will -always be the non-evaluated expression. - -Hierarchical attributes ------------------------ - -Hierarchical attributes are not yet fully supported in the API. Hierarchical -attributes support both read and write, but when read they are not calculated -and instead the `raw` value is returned:: - - # The hierarchical attribute `my_attribute` is set on Shot but this will not - # be reflected on the children. Instead the raw value is returned. - print shot['custom_attributes']['my_attribute'] - 'foo' - print task['custom_attributes']['my_attribute'] - None - -To work around this limitation it is possible to use the legacy api for -hierarchical attributes or to manually query the parents for values and use the -first value that is set. - -Validation -========== - -Custom attributes are validated on the ftrack server before persisted. The -validation will check that the type of the data is correct for the custom -attribute. 
- - * number - :py:class:`int` or :py:class:`float` - * text - :py:class:`str` or :py:class:`unicode` - * enumerator - :py:class:`list` - * boolean - :py:class:`bool` - * date - :py:class:`datetime.datetime` or :py:class:`datetime.date` - -If the value set is not valid a :py:exc:`ftrack_api.exception.ServerError` is -raised with debug information:: - - shot['custom_attributes']['fstart'] = 'test' - - Traceback (most recent call last): - ... - ftrack_api.exception.ServerError: Server reported error: - ValidationError(Custom attribute value for "fstart" must be of type number. - Got "test" of type ) \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst deleted file mode 100644 index 2be01ffe479..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst +++ /dev/null @@ -1,53 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/encode_media: - -************** -Encoding media -************** - -Media such as images and video can be encoded by the ftrack server to allow -playing it in the ftrack web interface. Media can be encoded using -:meth:`ftrack_api.session.Session.encode_media` which accepts a path to a file -or an existing component in the ftrack.server location. 
- -Here is an example of how to encode a video and read the output:: - - job = session.encode_media('/PATH/TO/MEDIA') - job_data = json.loads(job['data']) - - print 'Source component id', job_data['source_component_id'] - print 'Keeping original component', job_data['keep_original'] - for output in job_data['output']: - print u'Output component - id: {0}, format: {1}'.format( - output['component_id'], output['format'] - ) - -You can also call the corresponding helper method on an :meth:`asset version -`, to have the -encoded components automatically associated with the version:: - - job = asset_version.encode_media('/PATH/TO/MEDIA') - -It is also possible to get the URL to an encoded component once the job has -finished:: - - job = session.encode_media('/PATH/TO/MEDIA') - - # Wait for job to finish. - - location = session.query('Location where name is "ftrack.server"').one() - for component in job['job_components']: - print location.get_url(component) - -Media can also be an existing component in another location. Before encoding it, -the component needs to be added to the ftrack.server location:: - - location = session.query('Location where name is "ftrack.server"').one() - location.add_component(component) - session.commit() - - job = session.encode_media(component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst deleted file mode 100644 index 43e31484f40..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. _example/entity_links: - -****************** -Using entity links -****************** - -A link can be used to represent a dependency or another relation between -two entities in ftrack. 
- -There are two types of entities that can be linked: - -* Versions can be linked to other asset versions, where the link entity type - is `AssetVersionLink`. -* Objects like Task, Shot or Folder, where the link entity type is - `TypedContextLink`. - -Both `AssetVersion` and `TypedContext` objects have the same relations -`incoming_links` and `outgoing_links`. To list the incoming links to a Shot we -can use the relationship `incoming_links`:: - - for link in shot['incoming_links']: - print link['from'], link['to'] - -In the above example `link['to']` is the shot and `link['from']` could be an -asset build or something else that is linked to the shot. There is an equivalent -`outgoing_links` that can be used to access outgoing links on an object. - -To create a new link between objects or asset versions create a new -`TypedContextLink` or `AssetVersionLink` entity with the from and to properties -set. In this example we will link two asset versions:: - - session.create('AssetVersionLink', { - 'from': from_asset_version, - 'to': to_asset_version - }) - session.commit() - -Using asset version link shortcut -================================= - -Links on asset version can also be created by the use of the `uses_versions` and -`used_in_versions` relations:: - - rig_version['uses_versions'].append(model_version) - session.commit() - -This has the same result as creating the `AssetVersionLink` entity as in the -previous section. 
- -Which versions are using the model can be listed with:: - - for version in model_version['used_in_versions']: - print '{0} is using {1}'.format(version, model_version) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst deleted file mode 100644 index 4fca37d754e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst +++ /dev/null @@ -1,52 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example: - -************** -Usage examples -************** - -The following examples show how to use the API to accomplish specific tasks -using the default configuration. - -.. note:: - - If you are using a server with a customised configuration you may need to - alter the examples slightly to make them work correctly. - -Most of the examples assume you have the *ftrack_api* package imported and have -already constructed a :class:`Session`:: - - import ftrack_api - - session = ftrack_api.Session() - - -.. toctree:: - - project - component - review_session - metadata - custom_attribute - manage_custom_attribute_configuration - link_attribute - scope - job - note - list - timer - assignments_and_allocations - thumbnail - encode_media - entity_links - web_review - publishing - security_roles - task_template - sync_ldap_users - invite_user - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst deleted file mode 100644 index 342f0ef6025..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. 
_example/invite_user: - -********************* -Invite user -********************* - -Here we create a new user and send them a invitation through mail - - -Create a new user:: - - user_email = 'artist@mail.vfx-company.com' - - new_user = session.create( - 'User', { - 'username':user_email, - 'email':user_email, - 'is_active':True - } - ) - - session.commit() - - -Invite our new user:: - - new_user.send_invite() - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst deleted file mode 100644 index 296a0f5e173..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst +++ /dev/null @@ -1,97 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/job: - -************* -Managing jobs -************* - -.. currentmodule:: ftrack_api.session - -Jobs can be used to display feedback to users in the ftrack web interface when -performing long running tasks in the API. - -To create a job use :meth:`Session.create`:: - - user = # Get a user from ftrack. - - job = session.create('Job', { - 'user': user, - 'status': 'running' - }) - -The created job will appear as running in the :guilabel:`jobs` menu for the -specified user. To set a description on the job, add a dictionary containing -description as the `data` key: - -.. note:: - - In the current version of the API the dictionary needs to be JSON - serialised. - -.. code-block:: python - - import json - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'My custom job description.' - }) - }) - -When the long running task has finished simply set the job as completed and -continue with the next task. - -.. code-block:: python - - job['status'] = 'done' - session.commit() - -Attachments -=========== - -Job attachments are files that are attached to a job. 
In the ftrack web -interface these attachments can be downloaded by clicking on a job in the `Jobs` -menu. - -To get a job's attachments through the API you can use the `job_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for job_component in job['job_components']: - print 'Download URL: {0}'.format( - server_location.get_url(job_component['component']) - ) - -To add an attachment to a job you have to add it to the ftrack server location -and create a `jobComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the job. - session.create( - 'JobComponent', - {'component_id': component['id'], 'job_id': job['id']} - ) - - session.commit() - -.. note:: - - The ftrack web interface does only support downloading one attachment so - attaching more than one will have limited support in the web interface. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst deleted file mode 100644 index 1dcea842cdb..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst +++ /dev/null @@ -1,55 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/link_attribute: - -********************* -Using link attributes -********************* - -The `link` attribute can be used to retreive the ids and names of the parents of -an object. It is particularly useful in cases where the path of an object must -be presented in a UI, but can also be used to speedup certain query patterns. 
- -You can use the `link` attribute on any entity inheriting from a -`Context` or `AssetVersion`. Here we use it on the `Task` entity:: - - task = session.query( - 'select link from Task where name is "myTask"' - ).first() - print task['link'] - -It can also be used create a list of parent entities, including the task -itself:: - - entities = [] - for item in task['link']: - entities.append(session.get(item['type'], item['id'])) - -The `link` attribute is an ordered list of dictionaries containting data -of the parents and the item itself. Each dictionary contains the following -entries: - - id - The id of the object and can be used to do a :meth:`Session.get`. - name - The name of the object. - type - The schema id of the object. - -A more advanced use-case is to get the parent names and ids of all timelogs for -a user:: - - for timelog in session.query( - 'select context.link, start, duration from Timelog ' - 'where user.username is "john.doe"' - ): - print timelog['context']['link'], timelog['start'], timelog['duration'] - -The attribute is also available from the `AssetVersion` asset relation:: - - for asset_version in session.query( - 'select link from AssetVersion ' - 'where user.username is "john.doe"' - ): - print asset_version['link'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst deleted file mode 100644 index 155b25f9af6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst +++ /dev/null @@ -1,46 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/list: - -*********** -Using lists -*********** - -.. currentmodule:: ftrack_api.session - -Lists can be used to create a collection of asset versions or objects such as -tasks. 
It could be a list of items that should be sent to client, be included in -todays review session or items that belong together in way that is different -from the project hierarchy. - -There are two types of lists, one for asset versions and one for other objects -such as tasks. - -To create a list use :meth:`Session.create`:: - - user = # Get a user from ftrack. - project = # Get a project from ftrack. - list_category = # Get a list category from ftrack. - - asset_version_list = session.create('AssetVersionList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - - task_list = session.create('TypedContextList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - -Then add items to the list like this:: - - asset_version_list['items'].append(asset_version) - task_list['items'].append(task) - -And remove items from the list like this:: - - asset_version_list['items'].remove(asset_version) - task_list['items'].remove(task) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst deleted file mode 100644 index e3d7c4062c1..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst +++ /dev/null @@ -1,320 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/manage_custom_attribute_configuration: - -**************************************** -Managing custom attribute configurations -**************************************** - -From the API it is not only possible to -:ref:`read and update custom attributes for entities `, -but also managing custom attribute configurations. - -Existing custom attribute configurations can be queried as :: - - # Print all existing custom attribute configurations. 
- print session.query('CustomAttributeConfiguration').all() - -Use :meth:`Session.create` to create a new custom attribute configuration:: - - # Get the custom attribute type. - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -.. tip:: - - The example above does not add security roles. This can be done either - from System Settings in the ftrack web application, or by following the - :ref:`example/manage_custom_attribute_configuration/security_roles` example. - -Global or project specific -========================== - -A custom attribute can be global or project specific depending on the -`project_id` attribute:: - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - # Set the `project_id` and the custom attribute will only be available - # on `my_project`. - 'project_id': my_project['id'], - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - session.commit() - -A project specific custom attribute can be changed to a global:: - - custom_attribute_configuration['project_id'] = None - session.commit() - -Changing a global custom attribute configuration to a project specific is not -allowed. - -Entity types -============ - -Custom attribute configuration entity types are using a legacy notation. A -configuration can have one of the following as `entity_type`: - -:task: - Represents TypedContext (Folder, Shot, Sequence, Task, etc.) 
custom - attribute configurations. When setting this as entity_type the - object_type_id must be set as well. - - Creating a text custom attribute for Folder:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - object_type = session.query('ObjectType where name is "Folder"').one() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'task', - 'object_type_id': object_type['id'], - 'type': custom_attribute_type, - 'label': 'Foo', - 'key': 'foo', - 'default': 'bar', - }) - session.commit() - - Can be associated with a `project_id`. - -:show: - Represents Projects custom attribute configurations. - - Can be associated with a `project_id`. - -:assetversion: - Represents AssetVersion custom attribute configurations. - - Can be associated with a `project_id`. - -:user: - Represents User custom attribute configurations. - - Must be `global` and cannot be associated with a `project_id`. - -:list: - Represents List custom attribute configurations. - - Can be associated with a `project_id`. - -:asset: - Represents Asset custom attribute configurations. - - .. note:: - - Asset custom attributes have limited support in the ftrack web - interface. - - Can be associated with a `project_id`. - -It is not possible to change type after a custom attribute configuration has -been created. - -Custom attribute configuration types -==================================== - -Custom attributes can be of different data types depending on what type is set -in the configuration. Some types require an extra json encoded config to be -set: - -:text: - A string type custom attribute. - - The `default` value must be either :py:class:`str` or :py:class:`unicode`. - - Can be either presented as raw text or markdown formatted in applications - which support it. This is configured through a markdown key:: - - # Get the custom attribute type.
- custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -:boolean: - - A boolean type custom attribute. - - The `default` value must be a :py:class:`bool`. - - No config is required. - -:date: - A date type custom attribute. - - The `default` value must be an :term:`arrow` date - e.g. - arrow.Arrow(2017, 2, 8). - - No config is required. - -:enumerator: - An enumerator type custom attribute. - - The `default` value must be a list with either :py:class:`str` or - :py:class:`unicode`. - - The enumerator can either be single or multi select. The config must a json - dump of a dictionary containing `multiSelect` and `data`. Where - `multiSelect` is True or False and data is a list of options. Each option - should be a dictionary containing `value` and `menu`, where `menu` is meant - to be used as label in a user interface. - - Create a custom attribute enumerator:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }) - }) - session.commit() - -:dynamic enumerator: - - An enumerator type where available options are fetched from remote. Created - in the same way as enumerator but without `data`. 
- -:number: - - A number custom attribute can be either decimal or integer for presentation. - - This can be configured through the `isdecimal` config option:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "number"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Number attribute', - 'key': 'number_attribute', - 'default': 42, - 'config': json.dumps({ - 'isdecimal': True - }) - }) - session.commit() - -Changing default -================ - -It is possible to update the `default` value of a custom attribute -configuration. This will not change the value of any existing custom -attributes:: - - # Change the default value of custom attributes. This will only affect - # newly created entities. - custom_attribute_configuration['default'] = 43 - session.commit() - -.. _example/manage_custom_attribute_configuration/security_roles: - -Security roles -============== - -By default new custom attribute configurations and the entity values are not -readable or writable by any security role. - -This can be configured through the `read_security_roles` and `write_security_roles` -attributes:: - - # Pick random security role. - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "date"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Date attribute', - 'key': 'date_attribute', - 'default': arrow.Arrow(2017, 2, 8), - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. note:: - - Setting the correct security role is important and must be changed to - whatever security role is appropriate for your configuration and intended - purpose. 
- -Custom attribute groups -======================= - -A custom attribute configuration can be categorized using a -`CustomAttributeGroup`:: - - group = session.query('CustomAttributeGroup').first() - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }), - 'group': group, - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. seealso:: - - :ref:`example/custom_attribute` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst deleted file mode 100644 index 7b168810177..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst +++ /dev/null @@ -1,43 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/metadata: - -************** -Using metadata -************** - -.. currentmodule:: ftrack_api.session - -Key/value metadata can be written to entities using the metadata property -and also used to query entities. 
- -The metadata property has a similar interface as a dictionary and keys can be -printed using the keys method:: - - >>> print new_sequence['metadata'].keys() - ['frame_padding', 'focal_length'] - -or items:: - - >>> print new_sequence['metadata'].items() - [('frame_padding': '4'), ('focal_length': '70')] - -Read existing metadata:: - - >>> print new_sequence['metadata']['frame_padding'] - '4' - -Setting metadata can be done in a few ways where that later one will replace -any existing metadata:: - - new_sequence['metadata']['frame_padding'] = '5' - new_sequence['metadata'] = { - 'frame_padding': '4' - } - -Entities can also be queried using metadata:: - - session.query( - 'Sequence where metadata any (key is "frame_padding" and value is "4")' - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst deleted file mode 100644 index 8f8f1bb57da..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst +++ /dev/null @@ -1,169 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/note: - -*********** -Using notes -*********** - -Notes can be written on almost all levels in ftrack. To retrieve notes on an -entity you can either query them or use the relation called `notes`:: - - task = session.query('Task').first() - - # Retrieve notes using notes property. - notes_on_task = task['notes'] - - # Or query them. - notes_on_task = session.query('Note where parent_id is "{}"'.format( - task['id'] - )) - -.. note:: - - It's currently not possible to use the `parent` property when querying - notes or to use the `parent` property on notes:: - - task = session.query('Task').first() - - # This won't work in the current version of the API. 
- session.query('Note where parent.id is "{}"'.format( - task['id'] - )) - - # Neither will this. - parent_of_note = note['parent'] - -To create new notes you can either use the helper method called -:meth:`~ftrack_api.entity.note.CreateNoteMixin.create_note` on any entity that -can have notes or use :meth:`Session.create` to create them manually:: - - user = session.query('User').first() - - # Create note using the helper method. - note = task.create_note('My new note', author=user) - - # Manually create a note - note = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - task['notes'].append(note) - -Replying to an existing note can also be done with a helper method or by -using :meth:`Session.create`:: - - # Create using helper method. - first_note_on_task = task['notes'][0] - first_note_on_task.create_reply('My new reply on note', author=user) - - # Create manually - reply = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - first_note_on_task.replies.append(reply) - -Notes can have labels. Use the label argument to set labels on the -note using the helper method:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, labels=[label] - ) - -Or add labels to notes when creating a note manually:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = session.create('Note', { - 'content': 'New note with external category', - 'author': user - }) - - session.create('NoteLabelLink', { - 'note_id': note['id'], - 'label_id': label['id'] - }) - - task['notes'].append(note) - -.. note:: - - Support for labels on notes was added in ftrack server version 4.3. For - older versions of the server, NoteCategory can be used instead.
- -To specify a category when creating a note simply pass a `NoteCategory` instance -to the helper method:: - - category = session.query( - 'NoteCategory where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, category=category - ) - -When writing notes you might want to direct the note to someone. This is done -by adding users as recipients. If a user is added as a recipient the user will -receive notifications and the note will be displayed in their inbox. - -To add recipients pass a list of user or group instances to the helper method:: - - john = session.query('User where username is "john"').one() - animation_group = session.query('Group where name is "Animation"').first() - - note = task.create_note( - 'Note with recipients', author=user, recipients=[john, animation_group] - ) - -Attachments -=========== - -Note attachments are files that are attached to a note. In the ftrack web -interface these attachments appears next to the note and can be downloaded by -the user. - -To get a note's attachments through the API you can use the `note_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for note_component in note['note_components']: - print 'Download URL: {0}'.format( - server_location.get_url(note_component['component']) - ) - -To add an attachment to a note you have to add it to the ftrack server location -and create a `NoteComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the note. 
- session.create( - 'NoteComponent', - {'component_id': component['id'], 'note_id': note['id']} - ) - - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst deleted file mode 100644 index 0b4c0879d69..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst +++ /dev/null @@ -1,65 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/project: - -********************* -Working with projects -********************* - -.. currentmodule:: ftrack_api.session - -Creating a project -================== - -A project with sequences, shots and tasks can be created in one single -transaction. Tasks need to have a type and status set on creation based on the -project schema:: - - import uuid - - # Create a unique name for the project. - name = 'projectname_{0}'.format(uuid.uuid1().hex) - - # Naively pick the first project schema. For this example to work the - # schema must contain `Shot` and `Sequence` object types. - project_schema = session.query('ProjectSchema').first() - - # Create the project with the chosen schema. - project = session.create('Project', { - 'name': name, - 'full_name': name + '_full', - 'project_schema': project_schema - }) - - # Retrieve default types. - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequences, shots and tasks. 
- for sequence_number in range(1, 5): - sequence = session.create('Sequence', { - 'name': 'seq_{0}'.format(sequence_number), - 'parent': project - }) - - for shot_number in range(1, 5): - shot = session.create('Shot', { - 'name': '{0}0'.format(shot_number).zfill(3), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1, 5): - session.create('Task', { - 'name': 'task_{0}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst deleted file mode 100644 index bf1da18ab9b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/publishing: - -******************* -Publishing versions -******************* - -To know more about publishing and the concepts around publishing, read the -`ftrack article `_ -about publishing. - -To publish an asset you first need to get the context where the asset should be -published:: - - # Get a task from a given id. - task = session.get('Task', '423ac382-e61d-4802-8914-dce20c92b740') - -And the parent of the task which will be used to publish the asset on:: - - asset_parent = task['parent'] - -Then we create an asset and a version on the asset:: - - asset_type = session.query('AssetType where name is "Geometry"').one() - asset = session.create('Asset', { - 'name': 'My asset', - 'type': asset_type, - 'parent': asset_parent - }) - asset_version = session.create('AssetVersion', { - 'asset': asset, - 'task': task - }) - -.. 
note:: - - The task is not used as the parent of the asset, instead the task is linked - directly to the AssetVersion. - -Then when we have a version where we can create the components:: - - asset_version.create_component( - '/path/to/a/file.mov', location='auto' - ) - asset_version.create_component( - '/path/to/a/another-file.mov', location='auto' - ) - - session.commit() - -This will automatically create a new component and add it to the location which -has been configured as the first in priority. - -Components can also be named and added to a custom location like this:: - - location = session.query('Location where name is "my-location"') - asset_version.create_component( - '/path/to/a/file.mov', - data={ - 'name': 'foobar' - }, - location=location - ) - -.. seealso:: - - * :ref:`example/component` - * :ref:`example/web_review` - * :ref:`example/thumbnail` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst deleted file mode 100644 index 68f7870d1c6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/review_session: - -********************* -Using review sessions -********************* - -.. currentmodule:: ftrack_api.session - -Client review sessions can either be queried manually or by using a project -instance. - -.. code-block:: python - - review_sessions = session.query( - 'ReviewSession where name is "Weekly review"' - ) - - project_review_sessions = project['review_sessions'] - -To create a new review session on a specific project use :meth:`Session.create`. - -.. 
code-block:: python - - review_session = session.create('ReviewSession', { - 'name': 'Weekly review', - 'description': 'See updates from last week.', - 'project': project - }) - -To add objects to a review session create them using -:meth:`Session.create` and reference a review session and an asset version. - -.. code-block:: python - - review_session = session.create('ReviewSessionObject', { - 'name': 'Compositing', - 'description': 'Fixed shadows.', - 'version': 'Version 3', - 'review_session': review_session, - 'asset_version': asset_version - }) - -To list all objects in a review session. - -.. code-block:: python - - review_session_objects = review_session['review_session_objects'] - -Listing and adding collaborators to review session can be done using -:meth:`Session.create` and the `review_session_invitees` relation on a -review session. - -.. code-block:: python - - invitee = session.create('ReviewSessionInvitee', { - 'name': 'John Doe', - 'email': 'john.doe@example.com', - 'review_session': review_session - }) - - session.commit() - - invitees = review_session['review_session_invitees'] - -To remove a collaborator simply delete the object using -:meth:`Session.delete`. - -.. code-block:: python - - session.delete(invitee) - -To send out an invite email to a signle collaborator use -:meth:`Session.send_review_session_invite`. - -.. code-block:: python - - session.send_review_session_invite(invitee) - -Multiple invitees can have emails sent to them in one batch using -:meth:`Session.send_review_session_invites`. - -.. 
code-block:: python - - session.send_review_session_invites(a_list_of_invitees) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst deleted file mode 100644 index 3be42322cef..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/scope: - -************ -Using scopes -************ - -.. currentmodule:: ftrack_api.session - -Entities can be queried based on their scopes:: - - >>> tasks = session.query( - ... 'Task where scopes.name is "London"' - ... ) - -Scopes can be read and modified for entities:: - - >>> scope = session.query( - ... 'Scope where name is "London"' - ... )[0] - ... - ... if scope in task['scopes']: - ... task['scopes'].remove(scope) - ... else: - ... task['scopes'].append(scope) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst deleted file mode 100644 index 4219e3d1263..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/security_roles: - -********************************* -Working with user security roles -********************************* - -.. currentmodule:: ftrack_api.session - -The API exposes `SecurityRole` and `UserSecurityRole` that can be used to -specify who should have access to certain data on different projects. - -List all available security roles like this:: - - security_roles = session.query( - 'select name from SecurityRole where type is "PROJECT"' - ) - -.. 
note:: - - We only query for project roles since those are the ones we can add to a - user for certain projects. Other types include API and ASSIGNED. Type API - can only be added to global API keys, which is currently not supported via - the api and type ASSIGNED only applies to assigned tasks. - -To get all security roles from a user we can either use relations like this:: - - for user_security_role in user['user_security_roles']: - if user_security_role['is_all_projects']: - result_string = 'all projects' - else: - result_string = ', '.join( - [project['full_name'] for project in user_security_role['projects']] - ) - - print 'User has security role "{0}" which is valid on {1}.'.format( - user_security_role['security_role']['name'], - result_string - ) - -or query them directly like this:: - - user_security_roles = session.query( - 'UserSecurityRole where user.username is "{0}"'.format(session.api_user) - ).all() - -User security roles can also be added to a user for all projects like this:: - - project_manager_role = session.query( - 'SecurityRole where name is "Project Manager"' - ).one() - - session.create('UserSecurityRole', { - 'is_all_projects': True, - 'user': user, - 'security_role': project_manager_role - }) - session.commit() - -or for certain projects only like this:: - - projects = session.query( - 'Project where full_name is "project1" or full_name is "project2"' - ).all()[:] - - session.create('UserSecurityRole', { - 'user': user, - 'security_role': project_manager_role, - 'projects': projects - }) - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst deleted file mode 100644 index 5ea0e47dc68..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. 
- :copyright: Copyright (c) 2014 ftrack - -.. _example/sync_with_ldap: - -******************** -Sync users with LDAP -******************** - -.. currentmodule:: ftrack_api.session - - -If ftrack is configured to connect to LDAP you may trigger a -synchronization through the api using the -:meth:`ftrack_api.session.Session.call`:: - - result = session.call([ - dict( - action='delayed_job', - job_type='SYNC_USERS_LDAP' - ) - ]) - job = result[0]['data'] - -You will get a `ftrack_api.entity.job.Job` instance back which can be used -to check the success of the job:: - - if job.get('status') == 'failed': - # The job failed get the error. - logging.error(job.get('data')) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst deleted file mode 100644 index c6161e834a4..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/task_template: - -*************************** -Working with Task Templates -*************************** - -Task templates can help you organize your workflows by building a collection -of tasks to be applied for specific contexts. They can be applied to all `Context` -objects for example Project, Sequences, Shots, etc...
- -Query task templates -======================= - -Retrieve all task templates and their tasks for a project:: - - project = session.query('Project').first() - - for task_template in project['project_schema']['task_templates']: - print('\ntask template: {0}'.format( - task_template['name'] - )) - - for task_type in [t['task_type'] for t in task_template['items']]: - print('\ttask type: {0}'.format( - task_type['name'] - )) - - - -"Apply" a task template -======================= -Create all tasks in a random task template directly under the project:: - - - project = session.query('Project').first() - - task_template = random.choice( - project['project_schema']['task_templates'] - ) - - for task_type in [t['task_type'] for t in task_template['items']]: - session.create( - 'Task', { - 'name': task_type['name'], - 'type': task_type, - 'parent': project - } - ) - - session.commit() - - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst deleted file mode 100644 index 64199869a56..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst +++ /dev/null @@ -1,71 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. _example/thumbnail: - -*********************** -Working with thumbnails -*********************** - -Components can be used as thumbnails on various entities, including -`Project`, `Task`, `AssetVersion` and `User`.
To create and set a thumbnail -you can use the helper method -:meth:`~ftrack_api.entity.component.CreateThumbnailMixin.create_thumbnail` on -any entity that can have a thumbnail:: - - task = session.get('Task', my_task_id) - thumbnail_component = task.create_thumbnail('/path/to/image.jpg') - -It is also possible to set an entity thumbnail by setting its `thumbnail` -relation or `thumbnail_id` attribute to a component you would -like to use as a thumbnail. For a component to be usable as a thumbnail, -it should - - 1. Be a FileComponent. - 2. Exist in the *ftrack.server* :term:`location`. - 3. Be of an appropriate resolution and valid file type. - -The following example creates a new component in the server location, and -uses that as a thumbnail for a task:: - - task = session.get('Task', my_task_id) - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - thumbnail_component = session.create_component( - '/path/to/image.jpg', - dict(name='thumbnail'), - location=server_location - ) - task['thumbnail'] = thumbnail_component - session.commit() - -The next example reuses a version's thumbnail for the asset parent thumbnail:: - - asset_version = session.get('AssetVersion', my_asset_version_id) - asset_parent = asset_version['asset']['parent'] - asset_parent['thumbnail_id'] = asset_version['thumbnail_id'] - session.commit() - -.. _example/thumbnail/url: - -Retrieving thumbnail URL -======================== - -To get an URL to a thumbnail, `thumbnail_component`, which can be used used -to download or display the image in an interface, use the following:: - - import ftrack_api.symbol - server_location = session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) - thumbnail_url = server_location.get_thumbnail_url(thumbnail_component) - thumbnail_url_tiny = server_location.get_thumbnail_url( - thumbnail_component, size=100 - ) - thumbnail_url_large = server_location.get_thumbnail_url( - thumbnail_component, size=500 - ) - -.. 
seealso:: - - :ref:`example/component` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst deleted file mode 100644 index eb86e2f8976..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/timer: - -************ -Using timers -************ - -.. currentmodule:: ftrack_api.session - -Timers can be used to track how much time has been spend working on something. - -To start a timer for a user:: - - user = # Get a user from ftrack. - task = # Get a task from ftrack. - - user.start_timer(task) - -A timer has now been created for that user and should show up in the ftrack web -UI. - -To stop the currently running timer for a user and create a timelog from it:: - - user = # Get a user from ftrack. - - timelog = user.stop_timer() - -.. note:: - - Starting a timer when a timer is already running will raise in an exception. - Use the force parameter to automatically stop the running timer first. - - .. code-block:: python - - user.start_timer(task, force=True) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst deleted file mode 100644 index f1dede570ff..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst +++ /dev/null @@ -1,78 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/web_review: - -************************* -Publishing for web review -************************* - -Follow the :ref:`example/encode_media` example if you want to -upload and encode media using ftrack. 
- -If you already have a file encoded in the correct format and want to bypass -the built-in encoding in ftrack, you can create the component manually -and add it to the `ftrack.server` location:: - - # Retrieve or create version. - version = session.query('AssetVersion', 'SOME-ID') - - server_location = session.query('Location where name is "ftrack.server"').one() - filepath = '/path/to/local/file.mp4' - - component = version.create_component( - path=filepath, - data={ - 'name': 'ftrackreview-mp4' - }, - location=server_location - ) - - # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*. - component['metadata']['ftr_meta'] = json.dumps({ - 'frameIn': 0, - 'frameOut': 150, - 'frameRate': 25 - }) - - component.session.commit() - -To publish an image for review the steps are similar:: - - # Retrieve or create version. - version = session.query('AssetVersion', 'SOME-ID') - - server_location = session.query('Location where name is "ftrack.server"').one() - filepath = '/path/to/image.jpg' - - component = version.create_component( - path=filepath, - data={ - 'name': 'ftrackreview-image' - }, - location=server_location - ) - - # Meta data needs to contain *format*. - component['metadata']['ftr_meta'] = json.dumps({ - 'format': 'image' - }) - - component.session.commit() - -Here is a list of components names and how they should be used: - -================== ===================================== -Component name Use -================== ===================================== -ftrackreview-image Images reviewable in the browser -ftrackreview-mp4 H.264/mp4 video reviewable in browser -ftrackreview-webm WebM video reviewable in browser -================== ===================================== - -.. note:: - - Make sure to use the pre-defined component names and set the `ftr_meta` on - the components or review will not work. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst deleted file mode 100644 index aa5cc779760..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******** -Glossary -******** - -.. glossary:: - - accessor - An implementation (typically a :term:`Python` plugin) for accessing - a particular type of storage using a specific protocol. - - .. seealso:: :ref:`locations/overview/accessors` - - action - Actions in ftrack provide a standardised way to integrate other tools, - either off-the-shelf or custom built, directly into your ftrack - workflow. - - .. seealso:: :ref:`ftrack:using/actions` - - api - Application programming interface. - - arrow - A Python library that offers a sensible, human-friendly approach to - creating, manipulating, formatting and converting dates, times, and - timestamps. Read more at http://crsmithdev.com/arrow/ - - asset - A container for :term:`asset versions `, typically - representing the output from an artist. For example, 'geometry' - from a modeling artist. Has an :term:`asset type` that categorises the - asset. - - asset type - Category for a particular asset. - - asset version - A specific version of data for an :term:`asset`. Can contain multiple - :term:`components `. - - component - A container to hold any type of data (such as a file or file sequence). - An :term:`asset version` can have any number of components, each with - a specific name. For example, a published version of geometry might - have two components containing the high and low resolution files, with - the component names as 'hires' and 'lowres' respectively. - - PEP-8 - Style guide for :term:`Python` code. 
Read the guide at - https://www.python.org/dev/peps/pep-0008/ - - plugin - :term:`Python` plugins are used by the API to extend it with new - functionality, such as :term:`locations ` or :term:`actions `. - - .. seealso:: :ref:`understanding_sessions/plugins` - - python - A programming language that lets you work more quickly and integrate - your systems more effectively. Often used in creative industries. Visit - the language website at http://www.python.org - - PyPi - :term:`Python` package index. The Python Package Index or PyPI is the - official third-party software repository for the Python programming - language. Visit the website at https://pypi.python.org/pypi - - resource identifier - A string that is stored in ftrack as a reference to a resource (such as - a file) in a specific location. Used by :term:`accessors ` to - determine how to access data. - - .. seealso:: :ref:`locations/overview/resource_identifiers` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst deleted file mode 100644 index 1d378473fac..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst +++ /dev/null @@ -1,315 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _handling_events: - -*************** -Handling events -*************** - -.. currentmodule:: ftrack_api.event - -Events are generated in ftrack when things happen such as a task being updated -or a new version being published. Each :class:`~ftrack_api.session.Session` -automatically connects to the event server and can be used to subscribe to -specific events and perform an action as a result. That action could be updating -another related entity based on a status change or generating folders when a new -shot is created for example. 
- -The :class:`~hub.EventHub` for each :class:`~ftrack_api.session.Session` is -accessible via :attr:`Session.event_hub -<~ftrack_api.session.Session.event_hub>`. - -.. _handling_events/subscribing: - -Subscribing to events -===================== - -To listen to events, you register a function against a subscription using -:meth:`Session.event_hub.subscribe `. The subscription -uses the :ref:`expression ` syntax and will filter -against each :class:`~base.Event` instance to determine if the registered -function should receive that event. If the subscription matches, the registered -function will be called with the :class:`~base.Event` instance as its sole -argument. The :class:`~base.Event` instance is a mapping like structure and can -be used like a normal dictionary. - -The following example subscribes a function to receive all 'ftrack.update' -events and then print out the entities that were updated:: - - import ftrack_api - - - def my_callback(event): - '''Event callback printing all new or updated entities.''' - for entity in event['data'].get('entities', []): - - # Print data for the entity. - print(entity) - - - # Subscribe to events with the update topic. - session = ftrack_api.Session() - session.event_hub.subscribe('topic=ftrack.update', my_callback) - -At this point, if you run this, your code would exit almost immediately. This -is because the event hub listens for events in a background thread. Typically, -you only want to stay connected whilst using the session, but in some cases you -will want to block and listen for events solely - a dedicated event processor. -To do this, use the :meth:`EventHub.wait ` method:: - - # Wait for events to be received and handled. - session.event_hub.wait() - -You cancel waiting for events by using a system interrupt (:kbd:`Ctrl-C`). -Alternatively, you can specify a *duration* to process events for:: - - # Only wait and process events for 5 seconds. - session.event_hub.wait(duration=5) - -.. 
note:: - - Events are continually received and queued for processing in the background - as soon as the connection to the server is established. As a result you may - see a flurry of activity as soon as you call - :meth:`~hub.EventHub.wait` for the first time. - -.. _handling_events/subscribing/subscriber_information: - -Subscriber information ----------------------- - -When subscribing, you can also specify additional information about your -subscriber. This contextual information can be useful when routing events, -particularly when :ref:`targeting events -`. By default, the -:class:`~hub.EventHub` will set some default information, but it can be -useful to enhance this. To do so, simply pass in *subscriber* as a dictionary of -data to the :meth:`~hub.EventHub.subscribe` method:: - - session.event_hub.subscribe( - 'topic=ftrack.update', - my_callback, - subscriber={ - 'id': 'my-unique-subscriber-id', - 'applicationId': 'maya' - } - ) - -.. _handling_events/subscribing/sending_replies: - -Sending replies ---------------- - -When handling an event it is sometimes useful to be able to send information -back to the source of the event. For example, -:ref:`ftrack:developing/events/list/ftrack.location.request-resolve` would -expect a resolved path to be sent back. - -You can craft a custom reply event if you want, but an easier way is just to -return the appropriate data from your handler. Any non *None* value will be -automatically sent as a reply:: - - def on_event(event): - # Send following data in automatic reply. - return {'success': True, 'message': 'Cool!'} - - session.event_hub.subscribe('topic=test-reply', on_event) - -.. seealso:: - - :ref:`handling_events/publishing/handling_replies` - -.. note:: - - Some events are published :ref:`synchronously - `. In this case, any returned data - is passed back to the publisher directly. - -.. 
_handling_events/subscribing/stopping_events: - -Stopping events ---------------- - -The *event* instance passed to each event handler also provides a method for -stopping the event, :meth:`Event.stop `. - -Once an event has been stopped, no further handlers for that specific event -will be called **locally**. Other handlers in other processes may still be -called. - -Combining this with setting appropriate priorities when subscribing to a topic -allows handlers to prevent lower priority handlers running when desired. - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... '''Stop the event!''' - ... print('Callback A') - ... event.stop() - >>> - >>> def callback_b(event): - ... '''Never run.''' - ... print('Callback B') - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_b, priority=20 - ... ) - >>> session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-stop-event') - ... ) - >>> session.event_hub.wait(duration=5) - Callback A called. - -.. _handling_events/publishing: - -Publishing events -================= - -So far we have looked at listening to events coming from ftrack. However, you -are also free to publish your own events (or even publish relevant ftrack -events). - -To do this, simply construct an instance of :class:`ftrack_api.event.base.Event` -and pass it to :meth:`EventHub.publish ` via the session:: - - import ftrack_api.event.base - - event = ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'} - ) - session.event_hub.publish(event) - -The event hub will automatically add some information to your event before it -gets published, including the *source* of the event. By default the event source -is just the event hub, but you can customise this to provide more relevant -information if you want. 
For example, if you were publishing from within Maya:: - - session.event_hub.publish(ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'}, - source={ - 'applicationId': 'maya' - } - )) - -Remember that all supplied information can be used by subscribers to filter -events so the more accurate the information the better. - -.. _handling_events/publishing/synchronously: - -Publish synchronously ---------------------- - -It is also possible to call :meth:`~hub.EventHub.publish` synchronously by -passing `synchronous=True`. In synchronous mode, only local handlers will be -called. The result from each called handler is collected and all the results -returned together in a list:: - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... return 'A' - >>> - >>> def callback_b(event): - ... return 'B' - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_b, priority=20 - ... ) - >>> results = session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-synchronous'), - ... synchronous=True - ... ) - >>> print results - ['A', 'B'] - -.. _handling_events/publishing/handling_replies: - -Handling replies ----------------- - -When publishing an event it is also possible to pass a callable that will be -called with any :ref:`reply event ` -received in response to the published event. - -To do so, simply pass in a callable as the *on_reply* parameter:: - - def handle_reply(event): - print 'Got reply', event - - session.event_hub.publish( - ftrack_api.event.base.Event(topic='test-reply'), - on_reply=handle_reply - ) - -.. 
_handling_events/publishing/targeting: - -Targeting events ----------------- - -In addition to subscribers filtering events to receive, it is also possible to -give an event a specific target to help route it to the right subscriber. - -To do this, set the *target* value on the event to an :ref:`expression -`. The expression will filter against registered -:ref:`subscriber information -`. - -For example, if you have many subscribers listening for a event, but only want -one of those subscribers to get the event, you can target the event to the -subscriber using its registered subscriber id:: - - session.event_hub.publish( - ftrack_api.event.base.Event( - topic='my-company.topic', - data={'key': 'value'}, - target='id=my-custom-subscriber-id' - ) - ) - -.. _handling_events/expressions: - -Expressions -=========== - -An expression is used to filter against a data structure, returning whether the -structure fulfils the expression requirements. Expressions are currently used -for subscriptions when :ref:`subscribing to events -` and for targets when :ref:`publishing targeted -events `. - -The form of the expression is loosely groupings of 'key=value' with conjunctions -to join them. - -For example, a common expression for subscriptions is to filter against an event -topic:: - - 'topic=ftrack.location.component-added' - -However, you can also perform more complex filtering, including accessing -nested parameters:: - - 'topic=ftrack.location.component-added and data.locationId=london' - -.. note:: - - If the structure being tested does not have any value for the specified - key reference then it is treated as *not* matching. - -You can also use a single wildcard '*' at the end of any value for matching -multiple values. 
For example, the following would match all events that have a -topic starting with 'ftrack.':: - - 'topic=ftrack.*' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png deleted file mode 100644 index 7438cb52bebd5dd1c0c5814cd7e1d5f2fdf6a572..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7313 zcmV;C9B$)@P)KOJ#IsdX9YK2_oxysWkE--^>ub$^C=yJ%K%Nmb;2;67`L)eAY4kcqufz9M);<8V8!p z=o?uv&!ZhL4m4L)*EDnoGk5M=e08kfw{zRawV>DKmo+j(-X%mx?aklXJ34-A7N39K ze@&v_bdT*GepTS$Og#Q7dZ2A3&$bdOFRNkcPiuQ=E~c>2)91UHrmhjwBz$)TLRRNP zR|~zM+_`ToU4~HW(JdcWod`9orf&147T29$o58)^S`X+i%A<8<#YDSnTx70uIw!yKJOJ(=j z_ibhR<=Sq>C~i};bUo6;?ouuandaZ8P_5UzeB~e3mw({$&@~8xy5|X}E(ysG)Yu33 z^P8z^w=Q`m##C7lgr%;z)@FS_pP%=BWOp!mA=L-f6`cXLzq$XPX;6iXTY*)o}39>}p6n$tmGpgu~iD$?*G{5_Z4unBilu z{R3c;HHe3GZuSE_?i;+UDDaB`KBEh1ab&xL%>g>FpL*yUXeA1=I*7S=F^gkYe)=^C5*GB&v?!spa8xuMf* zmPG5fz_8}ut|zHpefHCZ^axtf=98)&#TF=C$aJV2A&Y}8G7U0VK62K9sP(*hO=SyT zR9I?eV`KUxTr8xuSV%nUDjYag=0Cak=Aq&`S?z?Aj^G=;P3bpi53r^6e&OpB!vrGa z*`NQ?)zu9)y6)eHot_&LHFS8#pCzZkw+l?Hilf2kSWq1OB06H={iEF2F>I1VAHnqH zp>GIT!5UKGlGTDBD5?z3VQYnV%Wn{}0^eMCC96?$s-{9h z24`5k)U}MvRH1y>n)wn-Rss+vMF+&!rW&CsSJ~9_l&rsYoAQ$JxEXE@QLP!H4|dvhV@zzFEit5UD1(uV%F(A@$o(wL6|ZUd->r zbmBpu!>XXN{(z{YkJ%+gRCPC8S*Q~2_|j}FcV)?ntPeOTtEDFJLi8iCSSJ>QMD?sJ zO*BFSuK70vn`y!n)k**os=<-E+eUTm?Zuh{Yh3E%Pb0Pa}Z-vat3=DLAFQibi8U4Gs`V6VN@w&x^%vX|Fp?m0USUnp?k!k3pgx!>WvAV4voV2?O zD_enq%~g2B>0??fsCH5^re$LdSuA9EHxCW`>!1f!&z|JS5d2mkLVid;rY9q9H7l_GM+q?69=#2NpVjScK5agadSJ$Z|vVmHHoh)~$3Yj)nC^yaP|b3lI~N-js~>WMo9FBO+oPZCFR5L zQs()w`8VKA7KCJ@a9n*WA*snU7^Rc*J3)Sh_~kEuDTMf&i>7Jv4d4I%_YL9OkmpoI zBq-(0d_C8M#O%7XvobX1UwWZ`Ro?VG4wKNfNXUjD&Y8fM&Z{)ShY4^1b6-UP zK~zj07y{P%vKfYPVuFOF-2Dw{fXrf6hiOgdD+oxCGz$7YB4O6n7c^@o0e+@BAgKus zTODj_=(Do+II zZz-Yxn+hPun1n4? 
zudyH>KYsklC!hS&g}CD5k3Tj9ud%{6k#uaH>Cn@~l%y?+o^j6F(3Gs@h9$<#l)Il7 z&W3JW;0Z0DWiJ)uGws`vt3$&c`ZMkrpZY2!D9Al%n0L#ih zBe3VTC_B0=QyJSeMHT_csc;0TXlU}P>B?C@cs84Zxz8yGeZuC>Va$$bI9Wh$rj^Gi zGs(vkYFv(#34Ja|o3jE{MI~SPIE<_QT(xhGZlv793VAWr>5ItBe%8;+n3T-`@6N(H z2~D83h{4;i4tZcaa9ri<*o>FgdWU@U=#f%NdSHGsgwqcUQca(49<3xVOsD1JCdk~- ziq1BfZd3Ch!@_wD6@ym<$^=a1eq@!jgrJnK>(b?hT8QJ;-8uc@sML&%^R2<6=)dd1JXZD41vkL z{w4J!0~6dJiU9gV&kcA6cPlLX_4IllSsy-p`0UxU&pa?c8N%t?irmFq!1{;B7xUZR zk!zg>``QLOq<|KRur>XLipBO8!8R@iMR-qlPA56$&e-87xAO|<3>1kwc2#6Wi-NR< z*i|S<-kTvHASXK{U~02{PDOvPLwe?pD?+mr}+JG3x$euNJ+AxO`^A>M9C@;?dC$q+th~3es|xK7+B_c~*o6f(*;u z_tFx0F0G-{F8e_gmhHKU&UQ$I7DJVK&K;h)<6e*|Bp-o88)~WVO zM3-A}8tzbSAm0X{uYwHr2Adl=FvsAl)vx=d&PN}8Wd09__Q3pP2tHff#7LCWMTlh@ z*)rD&d@gdq$2uVU`E6x=&x~r-kOz2DU55jLt&e z6Qt%{WJ*2B-4Go2LTldVW?VA#I7S43Z|C(Eiq zDvHz!vV1N`JdVOslooaXD+Ql{8KZKYWw3nTjW}saNZJ9PP+sEkf(s(+{ zGj}m;9|6=$!*0e2y5p(2AZe5yLEbNmtgY)Hr9$7Y_RY}&da2UT=>(^YK(FD?oxuw;%hI+4?g(dT#!gOh2E}zv|Yx9kQ%Fr1E9dok)b#+k0d@kGZaFo!=Q$( z6b@Gx)>=VcZqy?`HiuiG(Ww@<*(wP%tkfxAD`}u{EY^Dv`u^PgxSfVkl|Cusd7=on z+P6nX!vk;rI}32_)^W}M&)${&rqLwv`Gbc26UP65%`@^_jyGF=GWzb*Lb)wDht~cM zqh;;f3GseFg833Yc4yfgv>MBYG)REJ5};v(T4=Q5d8?XA(rlW^e5bE2)x)@_y1J^T ze_h>e)b%Gm{hb;>h7vf|za}A)hs~2n{=X@35K$02quodr;xbk21mOTe!_(JO07%QS zo;`bZ@7}#{zy0?1?b|Dy-~s-GLU;uN$HGuWiu{h_NdD{Zdv&Mo{ieUvQKU$b`sth^ zMT*p&I*JvmORgqxFkG4m# zVg<;6D?E-9CjH{(ciRpB%#NE{>Hg~0{wa4;0rJOJrQj<(K;U?o%C2eJr=>HS1sT)4 zQ$m#Aj{BJ7EK>9oAdd|fUj7t(g$D@yK22ll|5!5glrQ9dJOUQ1Ju0R&n7^qbgRZlk zv)aA_FhM1;&+gZ7K!j$nV9i`#U|>slMB=FR5tm+$mWfm|LcG9 zdgIX@gL>W^@LED{GJj)!3cFDOG7P`iei?j)2MSE8&i2pqPn)hdd*x!b+!~b_iA=3w zFTp38&kQI#I?~dk;={!3DA}u9H4(S5JH@iy_TR`%tv<@zhrU}$Ehe7Y)z$OS-r+^Fh z2g5Kf4Ab+Yiq2js#j` zodY{tuwe?=d$hc=Wz`z{D=|t7|4z44mUyX7A+;_{c9}(;JeBl3c%3sJUPs}jLdUIq zhaT_&6Kv zyvdaAuNs$1u<;wN&j66Vb3IgU zBuvTjfwAob)s=NS_LW>GC7)lgeKHt7sHEp3*+UA^*Yr_8w?2iw_$&{F<}Dfq3Xq{V z_o5JZg$D|dup9K47=OZ+!%Oa#)Mzogg$9URV8t&9w2p6EDa4uyLAbX%$q}Z)sK#C3uIN{3Yw(4RP 
zCr2wuU84vo>vmKn*C{kQX%j0}6qVaa==qC#UVRbfphtDJhYZWv=i9+ocz{4~gxshO z$4esFuKb$?0+Pxp9L*q@oA|tMKsl2GGPsFm?gMzpRxxC)8)K2Df4F}Q*P36{WZGmh zg0&t-$i-Ysa2Wz%2p4#SoTJe>%zhTp3?3mbAX)+q zL+}RxQm!*BAfuN58j$+yem>0Wpd&U#lavBvNd6)FBKQgq5SUn<**IcnE}4y9yRl!a zw5a65oxQH#_gYrP=?4H3zF7Qe+aJm%T2PXzc^y2Jp|$AG2IO86>PEBGo>NNU;7AnV zZL^ZkV)kW!I}wosQZ6G}%hth3{R8We^}G9@8nhJ*;ew9?U5hLwQ%m@OS=rs!E9Awb zX*n7S+~n| zP6Q-_bDHV+Q~J0dd@BzXo!lk#w8FT-KK2qc8_%m#)}O(D%RCRhF#rSts=GT+r!}I{ zyz3tDa}Fwcv&4!LT7+kN^8=K@@FJEY*K(Ig!ThDg8I&8PVGRdizT=Z+!f~?4m#j7e=S7vv{hKxSziMc1vdAISSGAEqekW zqnbh3XqHn3iG!*PMJ>O+av`D-&6_TX8yKP=E~2TR-H20SN-{Q!;ct)@Q@9l5Wps@Bb15##~-e!tl!Dul#Wt zpEVCVBDY6ka`xL@HW-1H9zcWNXRPlBY9pB-Yx^EpNIr&h0jtof(biFn9zK{#ExKLN zBxKi@xaNA2-s@wbFZ|gX=^S{ZTy!LyPU7Hmgn`!^t4IezmzqC&Oe#PI5&`nKFl}6k zggAVJ+QPG$;Kxg`S)IqIbXAIHhWpQPqBEr9$8H(JpyancduVa;p*{w{vU^UsPaLQ}hJ9Wv4l;5)} z`)yzSQb)03#R`y$6`xR6cj_ooq%OIdKnDj0|B}%Wn4(CL0wiEhdWDC=q#^~#BcO7* zJn0G#c=bnHqezhgWWW_3|7-8c6=b!QXnzs^z!&HTh>1Jxl#CFscqPJq@8OMzyKxtJ z!+utxgoJ=Z34sD73KX=Zf>uwf2VGBlw>c5RluA{lIv{nx`SUob23#KEI6mZ~I&RqT zZz1Hj-+t@=goifxGw4_vd84ez)4Z3zvGWZQq=-B5*Nbc;vmi9BLHq z%PL!!>EunecVy3p_7qaMZ%1(_B%DQXJ8Aet=DDJz=GLHX)9@kypEoB#KwqbGQu};S zHH31NZF!jeRy{weMdCX!ipZ5bDpDoEWeV4D>kLq=OQP^JlsYRV$0iC7RGuu+JV}@} z{c$a}OU(7J>SgMEp_5`63ShkeqKkH6J<5%c55uchul%3z&<1N`#n_qNjq;9At~5WP zsZrWu%5}fiS>6!F`aC*o>lbM5`w}I2nIQC?Fvmg)oLV_?P_$36)f)zddRZc~_ITizTh{QTS-%L6 zPZMj5tBju-q8j9yk{~i1?k*^5T0LI|K(7N^Kt_T?{l8hieAEezZY@QSCU`n*78_qHtAz!?B;s1n(Hdq@A{Rchg==cN1(~d5% z(w`t7pHbnAm8^|-0F?Bu3_V=mMO2(6vgPU-G%}C(g+=G4N!km>X2gbM32<>u_(}3! zoc3}tyj#59+yr?ZUS^OJr(E{hCO)IDlVwQOy1A~`*(wdMt(2X`oo?pdN^CP64^F0; zc}HSyPQn4^+ZpoVc>esk{}UeC;1)v8+bwuT4h357FU`%Ju>i$L6+&CPgoLMf%RLR@ z!ZO5tLxzQ1{-OxBXEvlu61s%UsI)Ds$4!23Zi1>dEY2?(wL(aexepq}aj|#bc!0G! 
zgOgn~s@D<{EW^`us)9JhFC`?R>f}6FUi(93HUp7~saD&MIyd0ivuFNKcxVG3A#wln(N>goIt9c2?j) zh>5iAD37c;z(HjcshuE-ZU7 zdw?}*^}w+;+2s_>MF)pyDBTU%2>JBsQ|}i8z+fGHb+GdHG)H$-=|z1AcYu9z2#iS~ zbX#2ZE6S<9D5KqiU{AR17(}eEp|K|O6xLuBYRu!nZW7@oAlp2)Gv$~qVVns`k?AC1 z8y%X=#JNLg8Hcjbxbk^(69mvIu^GV4i*nHdL47oM5uehWc?Ad$mD}LHIv+(}BRe)D zmn(Hqy@`~eEmPX)duy@5C}0{~L*Dks@J`*?iNH@gH(?{>lP6ESgaiXGloUeVzdezH zO9eJy;&-CqHxZl*Deq}XJ~^_V7II=Kis+1o06RS>tMr{QYoh}E17!g ztRQ>kUnyHBe`F5qF;xjCr6~>IiE}$PLf&rhSAZzc&OzGo-_U=mANU>r7i9VX2x7LW zopA3@t$7c()2)6tLjLf>4_|%t)u*3+`oRYuy!F;w89-m?KlM%hFKuxAS3|L(XU@xe rgJ69>%In+H&=0rLaU`. - -.. toctree:: - :maxdepth: 1 - - introduction - installing - tutorial - understanding_sessions - working_with_entities - querying - handling_events - caching - locations/index - example/index - api_reference/index - event_list - environment_variables - security_and_authentication - release/index - glossary - -****************** -Indices and tables -****************** - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst deleted file mode 100644 index 5e42621bee5..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst +++ /dev/null @@ -1,77 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _installing: - -********** -Installing -********** - -.. highlight:: bash - -Installation is simple with `pip `_:: - - pip install ftrack-python-api - -Building from source -==================== - -You can also build manually from the source for more control. 
First obtain a -copy of the source by either downloading the -`zipball `_ or -cloning the public repository:: - - git clone git@bitbucket.org:ftrack/ftrack-python-api.git - -Then you can build and install the package into your current Python -site-packages folder:: - - python setup.py install - -Alternatively, just build locally and manage yourself:: - - python setup.py build - -Building documentation from source ----------------------------------- - -To build the documentation from source:: - - python setup.py build_sphinx - -Then view in your browser:: - - file:///path/to/ftrack-python-api/build/doc/html/index.html - -Running tests against the source --------------------------------- - -With a copy of the source it is also possible to run the unit tests:: - - python setup.py test - -Dependencies -============ - -* `ftrack server `_ >= 3.3.11 -* `Python `_ >= 2.7, < 3 -* `Requests `_ >= 2, <3, -* `Arrow `_ >= 0.4.4, < 1, -* `termcolor `_ >= 1.1.0, < 2, -* `pyparsing `_ >= 2.0, < 3, -* `Clique `_ >= 1.2.0, < 2, -* `websocket-client `_ >= 0.40.0, < 1 - -Additional For building ------------------------ - -* `Sphinx `_ >= 1.2.2, < 2 -* `sphinx_rtd_theme `_ >= 0.1.6, < 1 -* `Lowdown `_ >= 0.1.0, < 2 - -Additional For testing ----------------------- - -* `Pytest `_ >= 2.3.5, < 3 -* `pytest-mock `_ >= 0.4, < 1, -* `pytest-catchlog `_ >= 1, <=2 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst deleted file mode 100644 index 63fe980749c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _introduction: - -************ -Introduction -************ - -This API allows developers to write :term:`Python` scripts that talk directly -with an ftrack server. 
The scripts can perform operations against that server -depending on granted permissions. - -With any API it is important to find the right balance between flexibility and -usefulness. If an API is too low level then everyone ends up writing boilerplate -code for common problems and usually in an non-uniform way making it harder to -share scripts with others. It's also harder to get started with such an API. -Conversely, an API that attempts to be too smart can often become restrictive -when trying to do more advanced functionality or optimise for performance. - -With this API we have tried to strike the right balance between these two, -providing an API that should be simple to use out-of-the-box, but also expose -more flexibility and power when needed. - -Nothing is perfect though, so please do provide feedback on ways that we can -continue to improve this API for your specific needs. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst deleted file mode 100644 index 97483221aae..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/configuring: - -********************* -Configuring locations -********************* - -To allow management of data by a location or retrieval of filesystem paths where -supported, a location instance needs to be configured in a session with an -:term:`accessor` and :term:`structure`. - -.. note:: - - The standard builtin locations require no further setup or configuration - and it is not necessary to read the rest of this section to use them. - -Before continuing, make sure that you are familiar with the general concepts -of locations by reading the :ref:`locations/overview`. - -.. 
_locations/configuring/manually: - -Configuring manually -==================== - -Locations can be configured manually when using a session by retrieving the -location and setting the appropriate attributes:: - - location = session.query('Location where name is "my.location"').one() - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - -.. _locations/configuring/automatically: - -Configuring automatically -========================= - -Often the configuration of locations should be determined by developers -looking after the core pipeline and so ftrack provides a way for a plugin to -be registered to configure the necessary locations for each session. This can -then be managed centrally if desired. - -The configuration is handled through the standard events system via a topic -*ftrack.api.session.configure-location*. Set up an :ref:`event listener plugin -` as normal with a register function that -accepts a :class:`~ftrack_api.session.Session` instance. Then register a -callback against the relevant topic to configure locations at the appropriate -time:: - - import ftrack_api - import ftrack_api.entity.location - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - location = session.query('Location where name is "my.location"').one() - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -.. 
note:: - - If you expect the plugin to also be evaluated by the legacy API, remember - to :ref:`validate the arguments `. - -So long as the directory containing the plugin exists on your -:envvar:`FTRACK_EVENT_PLUGIN_PATH`, the plugin will run for each session -created and any configured locations will then remain configured for the -duration of that related session. - -Be aware that you can configure many locations in one plugin or have separate -plugins for different locations - the choice is entirely up to you! diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst deleted file mode 100644 index ac1eaba6494..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _developing/locations: - -********* -Locations -********* - -Learn how to access locations using the API and configure your own location -plugins. - -.. toctree:: - :maxdepth: 1 - - overview - tutorial - configuring diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst deleted file mode 100644 index 0a6ec171aa1..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst +++ /dev/null @@ -1,143 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/overview: - -******** -Overview -******** - -Locations provides a way to easily track and manage data (files, image sequences -etc.) using ftrack. 
- -With locations it is possible to see where published data is in the world and -also to transfer data automatically between different locations, even different -storage mechanisms, by defining a few simple :term:`Python` plugins. By keeping -track of the size of the data it also helps manage storage capacity better. In -addition, the intrinsic links to production information makes assigning work to -others and transferring only the relevant data much simpler as well as greatly -reducing the burden on those responsible for archiving finished work. - -Concepts -======== - -The system is implemented in layers using a few key concepts in order to provide -a balance between out of the box functionality and custom configuration. - -.. _locations/overview/locations: - -Locations ---------- - -Data locations can be varied in scope and meaning - a facility, a laptop, a -specific drive. As such, rather than place a hard limit on what can be -considered a location, ftrack simply requires that a location be identifiable by -a string and that string be unique to that location. - -A global company with facilities in many different parts of the world might -follow a location naming convention similar to the following: - - * 'ftrack.london.server01' - * 'ftrack.london.server02' - * 'ftrack.nyc.server01' - * 'ftrack.amsterdam.server01' - * '..' - -Whereas, for a looser setup, the following might suit better: - - * 'bjorns-workstation' - * 'fredriks-mobile' - * 'martins-laptop' - * 'cloud-backup' - -Availability ------------- - -When tracking data across several locations it is important to be able to -quickly find out where data is available and where it is not. As such, ftrack -provides simple mechanisms for retrieving information on the availability of a -:term:`component` in each location. - -For a single file, the availability with be either 0% or 100%. 
For containers, -such as file sequences, each file is tracked separately and the availability of -the container calculated as an overall percentage (e.g. 47%). - -.. _locations/overview/accessors: - -Accessors ---------- - -Due to the flexibility of what can be considered a location, the system must be -able to cope with locations that represent different ways of storing data. For -example, data might be stored on a local hard drive, a cloud service or even in -a database. - -In addition, the method of accessing that storage can change depending on -perspective - local filesystem, FTP, S3 API etc. - -To handle this, ftrack introduces the idea of an :term:`accessor` that provides -access to the data in a standard way. An accessor is implemented in -:term:`Python` following a set interface and can be configured at runtime to -provide relevant access to a location. - -With an accessor configured for a location, it becomes possible to not only -track data, but also manage it through ftrack by using the accessor to add and -remove data from the location. - -At present, ftrack includes a :py:class:`disk accessor -` for local filesystem access. More will be -added over time and developers are encouraged to contribute their own. - -.. _locations/overview/structure: - -Structure ---------- - -Another important consideration for locations is how data should be structured -in the location (folder structure and naming conventions). For example, -different facilities may want to use different folder structures, or different -storage mechanisms may use different paths for the data. - -For this, ftrack supports the use of a :term:`Python` structure plugin. This -plugin is called when adding a :term:`component` to a location in order to -determine the correct structure to use. - -.. note:: - - A structure plugin accepts an ftrack entity as its input and so can be - reused for generating general structures as well. 
For example, an action - callback could be implemented to create the base folder structure for some - selected shots by reusing a structure plugin. - -.. _locations/overview/resource_identifiers: - -Resource identifiers --------------------- - -When a :term:`component` can be linked to multiple locations it becomes -necessary to store information about the relationship on the link rather than -directly on the :term:`component` itself. The most important information is the -path to the data in that location. - -However, as seen above, not all locations may be filesystem based or accessed -using standard filesystem protocols. For this reason, and to help avoid -confusion, this *path* is referred to as a :term:`resource identifier` and no -limitations are placed on the format. Keep in mind though that accessors use -this information (retrieved from the database) in order to work out how to -access the data, so the format used must be compatible with all the accessors -used for any one location. For this reason, most -:term:`resource identifiers ` should ideally look like -relative filesystem paths. - -.. _locations/overview/resource_identifiers/transformer: - -Transformer -^^^^^^^^^^^ - -To further support custom formats for -:term:`resource identifiers `, it is also possible to -configure a resource identifier transformer plugin which will convert -the identifiers before they are stored centrally and after they are retrieved. - -A possible use case of this might be to store JSON encoded metadata about a path -in the database and convert this to an actual filesystem path on retrieval. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst deleted file mode 100644 index 4c5a6c0f136..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst +++ /dev/null @@ -1,193 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/tutorial: - -******** -Tutorial -******** - -This tutorial is a walkthrough on how you interact with Locations using the -ftrack :term:`API`. Before you read this tutorial, make sure you familiarize -yourself with the location concepts by reading the :ref:`locations/overview`. - -All examples assume you are using Python 2.x, have the :mod:`ftrack_api` -module imported and a :class:`session ` created. - -.. code-block:: python - - import ftrack_api - session = ftrack_api.Session() - -.. _locations/creating-locations: - -Creating locations -================== - -Locations can be created just like any other entity using -:meth:`Session.create `:: - - location = session.create('Location', dict(name='my.location')) - session.commit() - -.. note:: - Location names beginning with ``ftrack.`` are reserved for internal use. Do - not use this prefix for your location names. - -To create a location only if it doesn't already exist use the convenience -method :meth:`Session.ensure `. This will return -either an existing matching location or a newly created one. - -Retrieving locations -==================== - -You can retrieve existing locations using the standard session -:meth:`~ftrack_api.session.Session.get` and -:meth:`~ftrack_api.session.Session.query` methods:: - - # Retrieve location by unique id. - location_by_id = session.get('Location', 'unique-id') - - # Retrieve location by name. 
- location_by_name = session.query( - 'Location where name is "my.location"' - ).one() - -To retrieve all existing locations use a standard query:: - - all_locations = session.query('Location').all() - for existing_location in all_locations: - print existing_location['name'] - -Configuring locations -===================== - -At this point you have created a custom location "my.location" in the database -and have an instance to reflect that. However, the location cannot be used in -this session to manage data unless it has been configured. To configure a -location for the session, set the appropriate attributes for accessor and -structure:: - - import tempfile - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - # Assign a disk accessor with *temporary* storage - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=tempfile.mkdtemp() - ) - - # Assign using ID structure. - location.structure = ftrack_api.structure.id.IdStructure() - - # Set a priority which will be used when automatically picking locations. - # Lower number is higher priority. - location.priority = 30 - -To learn more about how to configure locations automatically in a session, see -:ref:`locations/configuring`. - -.. note:: - - If a location is not configured in a session it can still be used as a - standard entity and to find out availability of components - -Using components with locations -=============================== - -The Locations :term:`API` tries to use sane defaults to stay out of your way. 
-When creating :term:`components `, a location is automatically picked -using :meth:`Session.pick_location `:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_a = session.create_component(path=component_path) - -To override, specify a location explicitly:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_b = session.create_component( - path=component_path, location=location - ) - -If you set the location to ``None``, the component will only be present in the -special origin location for the duration of the session:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_c = session.create_component(path=component_path, location=None) - -After creating a :term:`component` in a location, it can be added to another -location by calling :meth:`Location.add_component -` and passing the location to -use as the *source* location:: - - origin_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - location.add_component(component_c, origin_location) - -To remove a component from a location use :meth:`Location.remove_component -`:: - - location.remove_component(component_b) - -Each location specifies whether to automatically manage data when adding or -removing components. To ensure that a location does not manage data, mixin the -relevant location mixin class before use:: - - import ftrack_api - import ftrack_api.entity.location - - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - -Accessing paths -=============== - -The locations system is designed to help avoid having to deal with filesystem -paths directly. This is particularly important when you consider that a number -of locations won't provide any direct filesystem access (such as cloud storage). - -However, it is useful to still be able to get a filesystem path from locations -that support them (typically those configured with a -:class:`~ftrack_api.accessor.disk.DiskAccessor`). 
For example, you might need to -pass a filesystem path to another application or perform a copy using a faster -protocol. - -To retrieve the path if available, use :meth:`Location.get_filesystem_path -`:: - - print location.get_filesystem_path(component_c) - -Obtaining component availability -================================ - -Components in locations have a notion of availability. For regular components, -consisting of a single file, the availability would be either 0 if the -component is unavailable or 100 percent if the component is available in the -location. Composite components, like image sequences, have an availability -which is proportional to the amount of child components that have been added to -the location. - -For example, an image sequence might currently be in a state of being -transferred to :data:`test.location`. If half of the images are transferred, it -might be possible to start working with the sequence. To check availability use -the helper :meth:`Session.get_component_availability -` method:: - - print session.get_component_availability(component_c) - -There are also convenience methods on both :meth:`components -` and :meth:`locations -` for -retrieving availability as well:: - - print component_c.get_availability() - print location.get_component_availability(component_c) - -Location events -=============== - -If you want to receive event notifications when components are added to or -removed from locations, you can subscribe to the topics published, -:data:`ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC` or -:data:`ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC` and the callback -you want to be run. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst deleted file mode 100644 index 7a200529ab1..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst +++ /dev/null @@ -1,263 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _querying: - -******** -Querying -******** - -.. currentmodule:: ftrack_api.session - -The API provides a simple, but powerful query language in addition to iterating -directly over entity attributes. Using queries can often substantially speed -up your code as well as reduce the amount of code written. - -A query is issued using :meth:`Session.query` and returns a list of matching -entities. The query always has a single *target* entity type that the query -is built against. This means that you cannot currently retrieve back a list of -different entity types in one query, though using :ref:`projections -` does allow retrieving related entities of a different -type in one go. - -The syntax for a query is: - -.. code-block:: none - - select from where - -However, both the selection of projections and criteria are optional. This means -the most basic query is just to fetch all entities of a particular type, such as -all projects in the system:: - - projects = session.query('Project') - -A query always returns a :class:`~ftrack_api.query.QueryResult` instance that -acts like a list with some special behaviour. The main special behaviour is that -the actual query to the server is not issued until you iterate or index into the -query results:: - - for project in projects: - print project['name'] - -You can also explicitly call :meth:`~ftrack_api.query.QueryResult.all` on the -result set:: - - projects = session.query('Project').all() - -.. 
note:: - - This behaviour exists in order to make way for efficient *paging* and other - optimisations in future. - -.. _querying/criteria: - -Using criteria to narrow results -================================ - -Often you will have some idea of the entities you want to retrieve. In this -case you can optimise your code by not fetching more data than you need. To do -this, add criteria to your query:: - - projects = session.query('Project where status is active') - -Each criteria follows the form: - -.. code-block:: none - - - -You can inspect the entity type or instance to find out which :ref:`attributes -` are available to filter on for a particular -entity type. The list of :ref:`operators ` that can -be applied and the types of values they expect is listed later on. - -.. _querying/criteria/combining: - -Combining criteria ------------------- - -Multiple criteria can be applied in a single expression by joining them with -either ``and`` or ``or``:: - - projects = session.query( - 'Project where status is active and name like "%thrones"' - ) - -You can use parenthesis to control the precedence when compound criteria are -used (by default ``and`` takes precedence):: - - projects = session.query( - 'Project where status is active and ' - '(name like "%thrones" or full_name like "%thrones")' - ) - -.. _querying/criteria/relationships: - -Filtering on relationships --------------------------- - -Filtering on relationships is also intuitively supported. 
Simply follow the -relationship using a dotted notation:: - - tasks_in_project = session.query( - 'Task where project.id is "{0}"'.format(project['id']) - ) - -This works even for multiple strides across relationships (though do note that -excessive strides can affect performance):: - - tasks_completed_in_project = session.query( - 'Task where project.id is "{0}" and ' - 'status.type.name is "Done"' - .format(project['id']) - ) - -The same works for collections (where each entity in the collection is compared -against the subsequent condition):: - - import arrow - - tasks_with_time_logged_today = session.query( - 'Task where timelogs.start >= "{0}"'.format(arrow.now().floor('day')) - ) - -In the above query, each *Task* that has at least one *Timelog* with a *start* -time greater than the start of today is returned. - -When filtering on relationships, the conjunctions ``has`` and ``any`` can be -used to specify how the criteria should be applied. This becomes important when -querying using multiple conditions on collection relationships. The relationship -condition can be written against the following form:: - - () - -For optimal performance ``has`` should be used for scalar relationships when -multiple conditions are involved. For example, to find notes by a specific -author when only name is known:: - - notes_written_by_jane_doe = session.query( - 'Note where author has (first_name is "Jane" and last_name is "Doe")' - ) - -This query could be written without ``has``, giving the same results:: - - notes_written_by_jane_doe = session.query( - 'Note where author.first_name is "Jane" and author.last_name is "Doe"' - ) - -``any`` should be used for collection relationships. 
For example, to find all -projects that have at least one metadata instance that has `key=some_key` -and `value=some_value` the query would be:: - - projects_where_some_key_is_some_value = session.query( - 'Project where metadata any (key=some_key and value=some_value)' - ) - -If the query was written without ``any``, projects with one metadata matching -*key* and another matching the *value* would be returned. - -``any`` can also be used to query for empty relationship collections:: - - users_without_timelogs = session.query( - 'User where not timelogs any ()' - ) - -.. _querying/criteria/operators: - -Supported operators -------------------- - -This is the list of currently supported operators: - -+--------------+----------------+----------------------------------------------+ -| Operators | Description | Example | -+==============+================+==============================================+ -| = | Exactly equal. | name is "martin" | -| is | | | -+--------------+----------------+----------------------------------------------+ -| != | Not exactly | name is_not "martin" | -| is_not | equal. | | -+--------------+----------------+----------------------------------------------+ -| > | Greater than | start after "2015-06-01" | -| after | exclusive. | | -| greater_than | | | -+--------------+----------------+----------------------------------------------+ -| < | Less than | end before "2015-06-01" | -| before | exclusive. | | -| less_than | | | -+--------------+----------------+----------------------------------------------+ -| >= | Greater than | bid >= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| <= | Less than | bid <= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| in | One of. | status.type.name in ("In Progress", "Done") | -+--------------+----------------+----------------------------------------------+ -| not_in | Not one of. 
| status.name not_in ("Omitted", "On Hold") | -+--------------+----------------+----------------------------------------------+ -| like | Matches | name like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| not_like | Does not match | name not_like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| has | Test scalar | author has (first_name is "Jane" and | -| | relationship. | last_name is "Doe") | -+--------------+----------------+----------------------------------------------+ -| any | Test collection| metadata any (key=some_key and | -| | relationship. | value=some_value) | -+--------------+----------------+----------------------------------------------+ - -.. _querying/projections: - -Optimising using projections -============================ - -In :ref:`understanding_sessions` we mentioned :ref:`auto-population -` of attribute values on access. This -meant that when iterating over a lot of entities and attributes a large number -of queries were being sent to the server. Ultimately, this can cause your code -to run slowly:: - - >>> projects = session.query('Project') - >>> for project in projects: - ... print( - ... # Multiple queries issued here for each attribute accessed for - ... # each project in the loop! - ... '{project[full_name]} - {project[status][name]})' - ... .format(project=project) - ... ) - - -Fortunately, there is an easy way to optimise. If you know what attributes you -are interested in ahead of time you can include them in your query string as -*projections* in order to fetch them in one go:: - - >>> projects = session.query( - ... 'select full_name, status.name from Project' - ... ) - >>> for project in projects: - ... print( - ... # No additional queries issued here as the values were already - ... # loaded by the above query! - ... '{project[full_name]} - {project[status][name]})' - ... 
.format(project=project) - ... ) - -Notice how this works for related entities as well. In the example above, we -also fetched the name of each *Status* entity attached to a project in the same -query, which meant that no further queries had to be issued when accessing those -nested attributes. - -.. note:: - - There are no arbitrary limits to the number (or depth) of projections, but - do be aware that excessive projections can ultimately result in poor - performance also. As always, it is about choosing the right tool for the - job. - -You can also customise the -:ref:`working_with_entities/entity_types/default_projections` to use for each -entity type when none are specified in the query string. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst deleted file mode 100644 index 0eef0b7407a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release: - -*************************** -Release and migration notes -*************************** - -Find out information about what has changed between versions and any important -migration notes to be aware of when switching to a new version. - -.. toctree:: - :maxdepth: 1 - - release_notes - migration - migrating_from_old_api diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst deleted file mode 100644 index 699ccf224a7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst +++ /dev/null @@ -1,613 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. 
_release/migrating_from_old_api: - -********************** -Migrating from old API -********************** - -.. currentmodule:: ftrack_api.session - -Why a new API? -============== - -With the introduction of Workflows, ftrack is capable of supporting a greater -diversity of industries. We're enabling teams to closely align the system with -their existing practices and naming conventions, resulting in a tool that feels -more natural and intuitive. The old API was locked to specific workflows, making -it impractical to support this new feature naturally. - -We also wanted this new flexibility to extend to developers, so we set about -redesigning the API to fully leverage the power in the system. And while we had -the wrenches out, we figured why not go that extra mile and build in some of the -features that we see developers having to continually implement in-house across -different companies - features such as caching and support for custom pipeline -extensions. In essence, we decided to build the API that, as pipeline -developers, we had always wanted from our production tracking and asset -management systems. We think we succeeded, and we hope you agree. - -Installing -========== - -Before, you used to download the API package from your ftrack instance. With -each release of the new API we make it available on :term:`PyPi`, and -installing is super simple: - -.. code-block:: none - - pip install ftrack-python-api - -Before installing, it is always good to check the latest -:ref:`release/release_notes` to see which version of the ftrack server is -required. - -.. seealso:: :ref:`installing` - -Overview -======== - -An API needs to be approachable, so we built the new API to feel -intuitive and familiar. 
We bundle all the core functionality into one place – a -session – with consistent methods for interacting with entities in the system:: - - import ftrack_api - session = ftrack_api.Session() - -The session is responsible for loading plugins and communicating with the ftrack -server and allows you to use multiple simultaneous sessions. You will no longer -need to explicitly call :meth:`ftrack.setup` to load plugins. - -The core methods are straightforward: - -Session.create - create a new entity, like a new version. -Session.query - fetch entities from the server using a powerful query language. -Session.delete - delete existing entities. -Session.commit - commit all changes in one efficient call. - -.. note:: - - The new API batches create, update and delete operations by default for - efficiency. To synchronise local changes with the server you need to call - :meth:`Session.commit`. - -In addition all entities in the API now act like simple Python dictionaries, -with some additional helper methods where appropriate. If you know a little -Python (or even if you don't) getting up to speed should be a breeze:: - - >>> print user.keys() - ['first_name', 'last_name', 'email', ...] - >>> print user['email'] - 'old@example.com' - >>> user['email'] = 'new@example.com' - -And of course, relationships between entities are reflected in a natural way as -well:: - - new_timelog = session.create('Timelog', {...}) - task['timelogs'].append(new_timelog) - -.. seealso :: :ref:`tutorial` - -The new API also makes use of caching in order to provide more efficient -retrieval of data by reducing the number of calls to the remote server. - -.. seealso:: :ref:`caching` - -Open source and standard code style -=================================== - -The new API is open source software and developed in public at -`Bitbucket `_. We welcome you -to join us in the development and create pull requests there. - -In the new API, we also follow the standard code style for Python, -:term:`PEP-8`. 
This means that you will now find that methods and variables are -written using ``snake_case`` instead of ``camelCase``, amongst other things. - -Package name -============ - -The new package is named :mod:`ftrack_api`. By using a new package name, we -enable you to use the old API and the new side-by-side in the same process. - -Old API:: - - import ftrack - -New API:: - - import ftrack_api - -Specifying your credentials -=========================== - -The old API used three environment variables to authenticate with your ftrack -instance. While these continue to work as before, you now also have -the option to specify them when initializing the session:: - - >>> import ftrack_api - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -In the examples below, will assume that you have imported the package and -created a session. - -.. seealso:: - - * :ref:`environment_variables` - * :ref:`tutorial` - - -Querying objects -================ - -The old API relied on predefined methods for querying objects and constructors -which enabled you to get an entity by it's id or name. 
- -Old API:: - - project = ftrack.getProject('dev_tutorial') - task = ftrack.Task('8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = ftrack.User('jane') - -New API:: - - project = session.query('Project where name is "dev_tutorial"').one() - task = session.get('Task', '8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = session.query('User where username is "jane"').one() - -While the new API can be a bit more verbose for simple queries, it is much more -powerful and allows you to filter on any field and preload related data:: - - tasks = session.query( - 'select name, parent.name from Task ' - 'where project.full_name is "My Project" ' - 'and status.type.short is "DONE" ' - 'and not timelogs any ()' - ).all() - -The above fetches all tasks for “My Project” that are done but have no timelogs. -It also pre-fetches related information about the tasks parent – all in one -efficient query. - -.. seealso:: :ref:`querying` - -Creating objects -================ - -In the old API, you create objects using specialized methods, such as -:meth:`ftrack.createProject`, :meth:`Project.createSequence` and -:meth:`Task.createShot`. - -In the new API, you can create any object using :meth:`Session.create`. In -addition, there are a few helper methods to reduce the amount of boilerplate -necessary to create certain objects. Don't forget to call :meth:`Session.commit` -once you have issued your create statements to commit your changes. - -As an example, let's look at populating a project with a few entities. 
- -Old API:: - - project = ftrack.getProject('migration_test') - - # Get default task type and status from project schema - taskType = project.getTaskTypes()[0] - taskStatus = project.getTaskStatuses(taskType)[0] - - sequence = project.createSequence('001') - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = sequence.createShot( - '{0:03d}'.format(shot_number) - ) - shot.createTask( - 'Task name', - taskType, - taskStatus - ) - - -New API:: - - project = session.query('Project where name is "migration_test"').one() - - # Get default task type and status from project schema - project_schema = project['project_schema'] - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequence - sequence = session.create('Sequence', { - 'name': '001', - 'parent': project - }) - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = session.create('Shot', { - 'name': '{0:03d}'.format(shot_number), - 'parent': sequence, - 'status': default_shot_status - }) - session.create('Task', { - 'name': 'Task name', - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. - session.commit() - -If you test the example above, one thing you might notice is that the new API -is much more efficient. Thanks to the transaction-based architecture in the new -API only a single call to the server is required to create all the objects. - -.. seealso:: :ref:`working_with_entities/creating` - -Updating objects -================ - -Updating objects in the new API works in a similar way to the old API. Instead -of using the :meth:`set` method on objects, you simply set the key of the -entity to the new value, and call :meth:`Session.commit` to persist the -changes to the database. 
- -The following example adjusts the duration and comment of a timelog for a -user using the old and new API, respectively. - -Old API:: - - import ftrack - - user = ftrack.User('john') - user.set('email', 'john@example.com') - -New API:: - - import ftrack_api - session = ftrack_api.Session() - - user = session.query('User where username is "john"').one() - user['email'] = 'john@example.com' - session.commit() - -.. seealso:: :ref:`working_with_entities/updating` - - -Date and datetime attributes -============================ - -In the old API, date and datetime attributes where represented using a standard -:mod:`datetime` object. In the new API we have opted to use the :term:`arrow` -library instead. Datetime attributes are represented in the server timezone, -but with the timezone information stripped. - -Old API:: - - >>> import datetime - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('startdate') - datetime.datetime(2015, 9, 2, 0, 0) - - >>> # Updating a datetime attribute - >>> task_old_api.set('startdate', datetime.date.today()) - -New API:: - - >>> import arrow - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['start_date'] - - - >>> # In the new API, utilize the arrow library when updating a datetime. - >>> task_new_api['start_date'] = arrow.utcnow().floor('day') - >>> session.commit() - -Custom attributes -================= - -In the old API, custom attributes could be retrieved from an entity by using -the methods :meth:`get` and :meth:`set`, like standard attributes. In the new -API, custom attributes can be written and read from entities using the -``custom_attributes`` property, which provides a dictionary-like interface. 
- -Old API:: - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('my_custom_attribute') - - >>> task_old_api.set('my_custom_attribute', 'My new value') - - -New API:: - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['custom_attributes']['my_custom_attribute'] - - - >>> task_new_api['custom_attributes']['my_custom_attribute'] = 'My new value' - -For more information on working with custom attributes and existing -limitations, please see: - -.. seealso:: - - :ref:`example/custom_attribute` - - -Using both APIs side-by-side -============================ - -With so many powerful new features and the necessary support for more flexible -workflows, we chose early on to not limit the new API design by necessitating -backwards compatibility. However, we also didn't want to force teams using the -existing API to make a costly all-or-nothing switchover. As such, we have made -the new API capable of coexisting in the same process as the old API:: - - import ftrack - import ftrack_api - -In addition, the old API will continue to be supported for some time, but do -note that it will not support the new `Workflows -`_ and will not have new features back ported -to it. - -In the first example, we obtain a task reference using the old API and -then use the new API to assign a user to it:: - - import ftrack - import ftrack_api - - # Create session for new API, authenticating using envvars. 
- session = ftrack_api.Session() - - # Obtain task id using old API - shot = ftrack.getShot(['migration_test', '001', '010']) - task = shot.getTasks()[0] - task_id = task.getId() - - user = session.query( - 'User where username is "{0}"'.format(session.api_user) - ).one() - session.create('Appointment', { - 'resource': user, - 'context_id': task_id, - 'type': 'assignment' - }) - -The second example fetches a version using the new API and uploads and sets a -thumbnail using the old API:: - - import arrow - import ftrack - - # fetch a version published today - version = session.query( - 'AssetVersion where date >= "{0}"'.format( - arrow.now().floor('day') - ) - ).first() - - # Create a thumbnail using the old api. - thumbnail_path = '/path/to/thumbnail.jpg' - version_old_api = ftrack.AssetVersion(version['id']) - thumbnail = version_old_api.createThumbnail(thumbnail_path) - - # Also set the same thumbnail on the task linked to the version. - task_old_api = ftrack.Task(version['task_id']) - task_old_api.setThumbnail(thumbnail) - -.. note:: - - It is now possible to set thumbnails using the new API as well, for more - info see :ref:`example/thumbnail`. - -Plugin registration -------------------- - -To make event and location plugin register functions work with both old and new -API the function should be updated to validate the input arguments. For old -plugins the register method should validate that the first input is of type -``ftrack.Registry``, and for the new API it should be of type -:class:`ftrack_api.session.Session`. - -If the input parameter is not validated, a plugin might be mistakenly -registered twice, since both the new and old API will look for plugins the -same directories. - -.. seealso:: - - :ref:`ftrack:release/migration/3.0.29/developer_notes/register_function` - - -Example: publishing a new version -================================= - -In the following example, we look at migrating a script which publishes a new -version with two components. 
- -Old API:: - - # Query a shot and a task to create the asset against. - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - task = shot.getTasks()[0] - - # Create new asset. - asset = shot.createAsset(name='forest', assetType='geo') - - # Create a new version for the asset. - version = asset.createVersion( - comment='Added more leaves.', - taskid=task.getId() - ) - - # Get the calculated version number. - print version.getVersion() - - # Add some components. - previewPath = '/path/to/forest_preview.mov' - previewComponent = version.createComponent(path=previewPath) - - modelPath = '/path/to/forest_mode.ma' - modelComponent = version.createComponent(name='model', path=modelPath) - - # Publish. - asset.publish() - - # Add thumbnail to version. - thumbnail = version.createThumbnail('/path/to/forest_thumbnail.jpg') - - # Set thumbnail on other objects without duplicating it. - task.setThumbnail(thumbnail) - -New API:: - - # Query a shot and a task to create the asset against. - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ).one() - task = shot['children'][0] - - # Create new asset. - asset_type = session.query('AssetType where short is "geo"').first() - asset = session.create('Asset', { - 'parent': shot, - 'name': 'forest', - 'type': asset_type - }) - - # Create a new version for the asset. - status = session.query('Status where name is "Pending"').one() - version = session.create('AssetVersion', { - 'asset': asset, - 'status': status, - 'comment': 'Added more leaves.', - 'task': task - }) - - # In the new API, the version number is not set until we persist the changes - print 'Version number before commit: {0}'.format(version['version']) - session.commit() - print 'Version number after commit: {0}'.format(version['version']) - - # Add some components. 
- preview_path = '/path/to/forest_preview.mov' - preview_component = version.create_component(preview_path, location='auto') - - model_path = '/path/to/forest_mode.ma' - model_component = version.create_component(model_path, { - 'name': 'model' - }, location='auto') - - # Publish. Newly created version defaults to being published in the new api, - # but if set to false you can update it by setting the key on the version. - version['is_published'] = True - - # Persist the changes - session.commit() - - # Add thumbnail to version. - thumbnail = version.create_thumbnail( - '/path/to/forest_thumbnail.jpg' - ) - - # Set thumbnail on other objects without duplicating it. - task['thumbnail'] = thumbnail - session.commit() - - -Workarounds for missing convenience methods -=========================================== - -Query object by path --------------------- - -In the old API, there existed a convenience methods to get an object by -referencing the path (i.e object and parent names). - -Old API:: - - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - -New API:: - - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ) - - -Retrieving an object's parents ------------------------------- - -To retrieve a list of an object's parents, you could call the method -:meth:`getParents` in the old API. Currently, it is not possible to fetch this -in a single call using the new API, so you will have to traverse the ancestors -one-by-one and fetch each object's parent. - -Old API:: - - parents = task.getParents() - -New API:: - - parents = [] - for item in task['link'][:-1]: - parents.append(session.get(item['type'], item['id'])) - -Note that link includes the task itself so `[:-1]` is used to only retreive the -parents. To learn more about the `link` attribute, see -:ref:`Using link attributes example`. 
- -Limitations in the current version of the API -============================================= - -The new API is still quite young and in active development and there are a few -limitations currently to keep in mind when using it. - -Missing schemas ---------------- - -The following entities are as of the time of writing not currently available -in the new API. Let us know if you depend on any of them. - - * Booking - * Calendar and Calendar Type - * Dependency - * Manager and Manager Type - * Phase - * Role - * Task template - * Temp data - -Action base class ------------------ -There is currently no helper class for creating actions using the new API. We -will add one in the near future. - -In the meantime, it is still possible to create actions without the base class -by listening and responding to the -:ref:`ftrack:developing/events/list/ftrack.action.discover` and -:ref:`ftrack:developing/events/list/ftrack.action.launch` events. - -Legacy location ---------------- - -The ftrack legacy disk locations utilizing the -:class:`InternalResourceIdentifierTransformer` has been deprecated. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst deleted file mode 100644 index 1df2211f96c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _release/migration: - -*************** -Migration notes -*************** - -.. note:: - - Migrating from the old ftrack API? Read the dedicated :ref:`guide - `. - -Migrate to upcoming 2.0.0 -========================= - -.. 
_release/migration/2.0.0/event_hub: - -Default behavior for connecting to event hub --------------------------------------------- - -The default behavior for the `ftrack_api.Session` class will change -for the argument `auto_connect_event_hub`, the default value will -switch from True to False. In order for code relying on the event hub -to continue functioning as expected you must modify your code -to explicitly set the argument to True or that you manually call -`session.event_hub.connect()`. - -.. note:: - If you rely on the `ftrack.location.component-added` or - `ftrack.location.component-removed` events to further process created - or deleted components remember that your session must be connected - to the event hub for the events to be published. - - -Migrate to 1.0.3 -================ - -.. _release/migration/1.0.3/mutating_dictionary: - -Mutating custom attribute dictionary ------------------------------------- - -Custom attributes can no longer be set by mutating entire dictionary:: - - # This will result in an error. - task['custom_attributes'] = dict(foo='baz', bar=2) - session.commit() - -Instead the individual values should be changed:: - - # This works better. - task['custom_attributes']['foo'] = 'baz' - task['custom_attributes']['bar'] = 2 - session.commit() - -Migrate to 1.0.0 -================ - -.. _release/migration/1.0.0/chunked_transfer: - -Chunked accessor transfers --------------------------- - -Data transfers between accessors is now buffered using smaller chunks instead of -all data at the same time. Included accessor file representations such as -:class:`ftrack_api.data.File` and :class:`ftrack_api.accessor.server.ServerFile` -are built to handle that. If you have written your own accessor and file -representation you may have to update it to support multiple reads using the -limit parameter and multiple writes. - -Migrate to 0.2.0 -================ - -.. 
_release/migration/0.2.0/new_api_name: - -New API name ------------- - -In this release the API has been renamed from `ftrack` to `ftrack_api`. This is -to allow both the old and new API to co-exist in the same environment without -confusion. - -As such, any scripts using this new API need to be updated to import -`ftrack_api` instead of `ftrack`. For example: - -**Previously**:: - - import ftrack - import ftrack.formatter - ... - -**Now**:: - - import ftrack_api - import ftrack_api.formatter - ... diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst deleted file mode 100644 index d7978ac0b86..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst +++ /dev/null @@ -1,1478 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release/release_notes: - -************* -Release Notes -************* - -.. currentmodule:: ftrack_api.session - -.. release:: 1.8.2 - :date: 2020-01-14 - - .. change:: fixed - :tag: Test - - test_ensure_entity_with_non_string_data_types test fails due to missing parents. - - .. change:: changed - :tags: session - - Use WeakMethod when registering atexit handler to prevent memory leak. - -.. release:: 1.8.1 - :date: 2019-10-30 - - .. change:: changed - :tags: Location - - Increase chunk size for file operations to 1 Megabyte. - This value can now also be set from the environment variable: - - :envvar:`FTRACK_API_FILE_CHUNK_SIZE` - - .. change:: new - :tag: setup - - Add check for correct python version when installing with pip. - - .. change:: new - :tags: Notes - - Add support for note labels in create_note helper method. - - .. change:: changed - :tags: session - - Ensure errors from server are fully reported with stack trace. - -.. release:: 1.8.0 - :date: 2019-02-21 - - .. 
change:: fixed - :tags: documentation - - Event description component-removed report component-added event signature. - - .. change:: new - :tags: session, attribute - - Add new scalar type `object` to factory. - - .. change:: new - :tags: session, attribute - - Add support for list of `computed` attributes as part of schema - definition. A computed attribute is derived on the server side, and can - be time dependentant and differ between users. As such a computed - attribute is not suitable for long term encoding and will not be encoded - with the `persisted_only` stragey. - - .. change:: changed - - The `delayed_job` method has been deprecated in favour of a direct - `Session.call`. See :ref:`example/sync_with_ldap` for example - usage. - - .. change:: changed - - Private method :meth:`Session._call` has been converted to - a public method, :meth:`Session.call`. - - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: changed - :tags: session, events - - Event server connection error is too generic, - the actual error is now reported to users. - -.. release:: 1.7.1 - :date: 2018-11-13 - - .. change:: fixed - :tags: session, events - - Meta events for event hub connect and disconnect does not include - source. - - .. change:: fixed - :tags: session, location - - Missing context argument to - :meth:`ResourceIdentifierTransformer.decode` - in :meth:`Location.get_resource_identifier`. - -.. release:: 1.7.0 - :date: 2018-07-27 - - .. change:: new - :tags: session, events - - Added new events :ref:`event_list/ftrack.api.session.ready` and - :ref:`event_list/ftrack.api.session.reset` which can be used to perform - operations after the session is ready or has been reset, respectively. - - .. change:: changed - - Private method :meth:`Session._entity_reference` has been converted to - a public method, :meth:`Session.entity_reference`. 
- - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: fixed - :tags: session, events - - :meth:`Session.close` raises an exception if event hub was explicitly - connected after session initialization. - -.. release:: 1.6.0 - :date: 2018-05-17 - - .. change:: new - :tags: depreciation, events - - In version 2.0.0 of the `ftrack-python-api` the default behavior for - the :class:`Session` class will change for the argument - *auto_connect_event_hub*, the default value will switch from *True* to - *False*. - - A warning will now be emitted if async events are published or - subscribed to without *auto_connect_event_hub* has not explicitly been - set to *True*. - - .. seealso:: :ref:`release/migration/2.0.0/event_hub`. - - .. change:: fixed - :tags: documentation - - Event payload not same as what is being emitted for - :ref:`event_list/ftrack.location.component-added` and - :ref:`event_list/ftrack.location.component-removed`. - - .. change:: fixed - :tags: events - - Pyparsing is causing random errors in a threaded environment. - -.. release:: 1.5.0 - :date: 2018-04-19 - - .. change:: fixed - :tags: session, cache - - Cached entities not updated correctly when fetched in a nested - query. - -.. release:: 1.4.0 - :date: 2018-02-05 - - .. change:: fixed - :tags: session, cache - - Collection attributes not merged correctly when fetched from - server. - - .. change:: new - :tags: session, user, api key - - New function :meth:`ftrack_api.session.Session.reset_remote` allows - resetting of attributes to their default value. A convenience method - for resetting a users api key utalizing this was also added - :meth:`ftrack_api.entity.user.User.reset_api_key`. - - .. seealso:: :ref:`working_with_entities/resetting` - - .. change:: new - - Add support for sending out invitation emails to users. - See :ref:`example/invite_user` for example usage. 
- - .. change:: changed - :tags: cache, performance - - Entities fetched from cache are now lazily merged. Improved - performance when dealing with highly populated caches. - -.. release:: 1.3.3 - :date: 2017-11-16 - - - .. change:: new - :tags: users, ldap - - Add support for triggering a synchronization of - users between ldap and ftrack. See :ref:`example/sync_with_ldap` - for example usage. - - .. note:: - - This requires that you run ftrack 3.5.10 or later. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.2 - :date: 2017-09-18 - - - .. change:: new - :tags: task template - - Added example for managing task templates through the API. See - :ref:`example/task_template` for example usage. - - .. change:: fixed - :tags: custom attributes - - Not possible to set hierarchical custom attributes on an entity that - has not been committed. - - .. change:: fixed - :tags: custom attributes - - Not possible to set custom attributes on an `Asset` that has not been - committed. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.1 - :date: 2017-07-21 - - .. change:: fixed - :tags: session, events - - Calling disconnect on the event hub is slow. - -.. release:: 1.3.0 - :date: 2017-07-17 - - .. change:: new - :tags: session - - Support using a :class:`Session` as a context manager to aid closing of - session after use:: - - with ftrack_api.Session() as session: - # Perform operations with session. - - .. change:: new - :tags: session - - :meth:`Session.close` automatically called on Python exit if session not - already closed. - - .. change:: new - :tags: session - - Added :meth:`Session.close` to properly close a session's connections to - the server(s) as well as ensure event listeners are properly - unsubscribed. - - .. 
change:: new - - Added :exc:`ftrack_api.exception.ConnectionClosedError` to represent - error caused when trying to access servers over closed connection. - -.. release:: 1.2.0 - :date: 2017-06-16 - - .. change:: changed - :tags: events - - Updated the websocket-client dependency to version >= 0.40.0 to allow - for http proxies. - - .. change:: fixed - :tags: documentation - - The :ref:`example/publishing` example incorrectly stated that a - location would be automatically picked if the *location* keyword - argument was omitted. - -.. release:: 1.1.1 - :date: 2017-04-27 - - .. change:: fixed - :tags: custom attributes - - Cannot use custom attributes for `Asset` in ftrack versions prior to - `3.5.0`. - - .. change:: fixed - :tags: documentation - - The :ref:`example ` - section for managing `text` custom attributes is not correct. - -.. release:: 1.1.0 - :date: 2017-03-08 - - .. change:: new - :tags: server location, thumbnail - - Added method :meth:`get_thumbnail_url() ` - to server location, which can be used to retrieve a thumbnail URL. - See :ref:`example/thumbnail/url` for example usage. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to manage entity - links from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on - how to manage custom attribute configurations from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to use - `SecurityRole` and `UserSecurityRole` to manage security roles for - users. - - .. change:: new - :tags: documentation - - Added :ref:`examples ` to show how - to list a user's assigned tasks and all users assigned to a task. - - .. change:: changed - :tags: session, plugins - - Added *plugin_arguments* to :class:`Session` to allow passing of - optional keyword arguments to discovered plugin register functions. 
Only - arguments defined in a plugin register function signature are passed so - existing plugin register functions do not need updating if the new - functionality is not desired. - - .. change:: fixed - :tags: documentation - - The :ref:`example/project` example can be confusing since the project - schema may not contain the necessary object types. - - .. change:: fixed - :tags: documentation - - Query tutorial article gives misleading information about the ``has`` - operator. - - .. change:: fixed - :tags: session - - Size is not set on sequence components when using - :meth:`Session.create_component`. - -.. release:: 1.0.4 - :date: 2017-01-13 - - .. change:: fixed - :tags: custom attributes - - Custom attribute values cannot be set on entities that are not - persisted. - - .. change:: fixed - :tags: events - - `username` in published event's source data is set to the operating - system user and not the API user. - -.. release:: 1.0.3 - :date: 2017-01-04 - - .. change:: changed - :tags: session, custom attributes - - Increased performance of custom attributes and better support for - filtering when using a version of ftrack that supports non-sparse - attribute values. - - .. change:: changed - :tags: session, custom attributes - - Custom attributes can no longer be set by mutating entire dictionary. - - .. seealso:: :ref:`release/migration/1.0.3/mutating_dictionary`. - -.. release:: 1.0.2 - :date: 2016-11-17 - - .. change:: changed - :tags: session - - Removed version restriction for higher server versions. - -.. release:: 1.0.1 - :date: 2016-11-11 - - .. change:: fixed - - :meth:`EventHub.publish ` - *on_reply* callback only called for first received reply. It should be - called for all relevant replies received. - -.. release:: 1.0.0 - :date: 2016-10-28 - - .. change:: new - :tags: session - - :meth:`Session.get_upload_metadata` has been added. - - .. 
change:: changed - :tags: locations, backwards-incompatible - - Data transfer between locations using accessors is now chunked to avoid - reading large files into memory. - - .. seealso:: :ref:`release/migration/1.0.0/chunked_transfer`. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been refactored to - work with large files more efficiently. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been updated to use - the get_upload_metadata API endpoint instead of - /component/getPutMetadata. - - .. change:: changed - :tags: locations - - :class:`ftrack_api.data.String` is now using a temporary file instead of - StringIO to avoid reading large files into memory. - - .. change:: fixed - :tags: session, locations - - `ftrack.centralized-storage` does not properly validate location - selection during user configuration. - -.. release:: 0.16.0 - :date: 2016-10-18 - - .. change:: new - :tags: session, encode media - - :meth:`Session.encode_media` can now automatically associate the output - with a version by specifying a *version_id* keyword argument. A new - helper method on versions, :meth:`AssetVersion.encode_media - `, can be - used to make versions playable in a browser. A server version of 3.3.32 - or higher is required for it to function properly. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: changed - :tags: session, encode media - - You can now decide if :meth:`Session.encode_media` should keep or - delete the original component by specifying the *keep_original* - keyword argument. - - .. change:: changed - :tags: backwards-incompatible, collection - - Collection mutation now stores collection instance in operations rather - than underlying data structure. - - .. change:: changed - :tags: performance - - Improve performance of commit operations by optimising encoding and - reducing payload sent to server. - - .. 
change:: fixed - :tags: documentation - - Asset parent variable is declared but never used in - :ref:`example/publishing`. - - .. change:: fixed - :tags: documentation - - Documentation of hierarchical attributes and their limitations are - misleading. See :ref:`example/custom_attribute`. - -.. release:: 0.15.5 - :date: 2016-08-12 - - .. change:: new - :tags: documentation - - Added two new examples for :ref:`example/publishing` and - :ref:`example/web_review`. - - .. change:: fixed - :tags: session, availability - - :meth:`Session.get_component_availabilities` ignores passed locations - shortlist and includes all locations in returned availability mapping. - - .. change:: fixed - :tags: documentation - - Source distribution of ftrack-python-api does not include ftrack.css - in the documentation. - -.. release:: 0.15.4 - :date: 2016-07-12 - - .. change:: fixed - :tags: querying - - Custom offset not respected by - :meth:`QueryResult.first `. - - .. change:: changed - :tags: querying - - Using a custom offset with :meth:`QueryResult.one - ` helper method now raises an - exception as an offset is inappropriate when expecting to select a - single item. - - .. change:: fixed - :tags: caching - - :meth:`LayeredCache.remove ` - incorrectly raises :exc:`~exceptions.KeyError` if key only exists in - sub-layer cache. - -.. release:: 0.15.3 - :date: 2016-06-30 - - .. change:: fixed - :tags: session, caching - - A newly created entity now has the correct - :attr:`ftrack_api.symbol.CREATED` state when checked in caching layer. - Previously the state was :attr:`ftrack_api.symbol.NOT_SET`. Note that - this fix causes a change in logic and the stored - :class:`ftrack_api.operation.CreateEntityOperation` might hold data that - has not been fully :meth:`merged `. - - .. change:: fixed - :tags: documentation - - The second example in the assignments article is not working. - - .. 
change:: changed - :tags: session, caching - - A callable cache maker can now return ``None`` to indicate that it could - not create a suitable cache, but :class:`Session` instantiation can - continue safely. - -.. release:: 0.15.2 - :date: 2016-06-02 - - .. change:: new - :tags: documentation - - Added an example on how to work with assignments and allocations - :ref:`example/assignments_and_allocations`. - - .. change:: new - :tags: documentation - - Added :ref:`example/entity_links` article with - examples of how to manage asset version dependencies. - - .. change:: fixed - :tags: performance - - Improve performance of large collection management. - - .. change:: fixed - - Entities are not hashable because - :meth:`ftrack_api.entity.base.Entity.__hash__` raises `TypeError`. - -.. release:: 0.15.1 - :date: 2016-05-02 - - .. change:: fixed - :tags: collection, attribute, performance - - Custom attribute configurations does not cache necessary keys, leading - to performance issues. - - .. change:: fixed - :tags: locations, structure - - Standard structure does not work if version relation is not set on - the `Component`. - -.. release:: 0.15.0 - :date: 2016-04-04 - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` not working properly on Windows. - -.. release:: 0.14.0 - :date: 2016-03-14 - - .. change:: changed - :tags: session, locations - - The `ftrack.centralized-storage` configurator now validates that name, - label and description for new locations are filled in. - - .. change:: new - :tags: session, client review - - Added :meth:`Session.send_review_session_invite` and - :meth:`Session.send_review_session_invites` that can be used to inform - review session invitees about a review session. - - .. seealso:: :ref:`Usage guide `. - - .. change:: new - :tags: session, locations - - Added `ftrack.centralized-storage` configurator as a private module. 
It - implements a wizard like interface used to configure a centralised - storage scenario. - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` storage scenario is automatically - configured based on information passed from the server with the - `query_server_information` action. - - .. change:: new - :tags: structure - - Added :class:`ftrack_api.structure.standard.StandardStructure` with - hierarchy based resource identifier generation. - - .. change:: new - :tags: documentation - - Added more information to the :ref:`understanding_sessions/plugins` - article. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.start_timer` arguments *comment* - and *name* are ignored. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.stop_timer` calculates the wrong - duration when the server is not running in UTC. - - For the duration to be calculated correctly ftrack server version - >= 3.3.15 is required. - -.. release:: 0.13.0 - :date: 2016-02-10 - - .. change:: new - :tags: component, thumbnail - - Added improved support for handling thumbnails. - - .. seealso:: :ref:`example/thumbnail`. - - .. change:: new - :tags: session, encode media - - Added :meth:`Session.encode_media` that can be used to encode - media to make it playable in a browser. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: fixed - - :meth:`Session.commit` fails when setting a custom attribute on an asset - version that has been created and committed in the same session. - - .. change:: new - :tags: locations - - Added :meth:`ftrack_api.entity.location.Location.get_url` to retrieve a - URL to a component in a location if supported by the - :class:`ftrack_api.accessor.base.Accessor`. - - .. change:: new - :tags: documentation - - Updated :ref:`example/note` and :ref:`example/job` articles with - examples of how to use note and job components. - - .. 
change:: changed - :tags: logging, performance - - Logged messages now evaluated lazily using - :class:`ftrack_api.logging.LazyLogMessage` as optimisation. - - .. change:: changed - :tags: session, events - - Auto connection of event hub for :class:`Session` now takes place in - background to improve session startup time. - - .. change:: changed - :tags: session, events - - Event hub connection timeout is now 60 seconds instead of 10. - - .. change:: changed - :tags: server version - - ftrack server version >= 3.3.11, < 3.4 required. - - .. change:: changed - :tags: querying, performance - - :class:`ftrack_api.query.QueryResult` now pages internally using a - specified page size in order to optimise record retrieval for large - query results. :meth:`Session.query` has also been updated to allow - passing a custom page size at runtime if desired. - - .. change:: changed - :tags: querying, performance - - Increased performance of :meth:`~ftrack_api.query.QueryResult.first` and - :meth:`~ftrack_api.query.QueryResult.one` by using new `limit` syntax. - -.. release:: 0.12.0 - :date: 2015-12-17 - - .. change:: new - :tags: session, widget url - - Added :meth:`ftrack_api.session.Session.get_widget_url` to retrieve an - authenticated URL to info or tasks widgets. - -.. release:: 0.11.0 - :date: 2015-12-04 - - .. change:: new - :tags: documentation - - Updated :ref:`release/migrating_from_old_api` with new link attribute - and added a :ref:`usage example `. - - .. change:: new - :tags: caching, schemas, performance - - Caching of schemas for increased performance. - :meth:`ftrack_api.session.Session` now accepts `schema_cache_path` - argument to specify location of schema cache. If not set it will use a - temporary folder. - -.. release:: 0.10.0 - :date: 2015-11-24 - - .. change:: changed - :tags: tests - - Updated session test to use mocked schemas for encoding tests. - - .. 
change:: fixed - - Documentation specifies Python 2.6 instead of Python 2.7 as minimum - interpreter version. - - .. change:: fixed - - Documentation does not reflect current dependencies. - - .. change:: changed - :tags: session, component, locations, performance - - Improved performance of - :meth:`ftrack_api.entity.location.Location.add_components` by batching - database operations. - - As a result it is no longer possible to determine progress of transfer - for container components in realtime as events will be emitted in batch - at end of operation. - - In addition, it is now the caller's responsibility to clean up any - transferred data should an error occur during either data transfer or - database registration. - - .. change:: changed - :tags: exception, locations - - :exc:`ftrack_api.exception.ComponentInLocationError` now accepts either - a single component or multiple components and makes them available as - *components* in its *details* parameter. - - .. change:: changed - :tags: tests - - Updated session test to not fail on the new private link attribute. - - .. change:: changed - :tags: session - - Internal method :py:meth:`_fetch_schemas` has been renamed to - :py:meth:`Session._load_schemas` and now requires a `schema_cache_path` - argument. - -.. release:: 0.9.0 - :date: 2015-10-30 - - .. change:: new - :tags: caching - - Added :meth:`ftrack_api.cache.Cache.values` as helper for retrieving - all values in cache. - - .. change:: fixed - :tags: session, caching - - :meth:`Session.merge` redundantly attempts to expand entity references - that have already been expanded causing performance degradation. - - .. change:: new - :tags: session - - :meth:`Session.rollback` has been added to support cleanly reverting - session state to last good state following a failed commit. - - .. change:: changed - :tags: events - - Event hub will no longer allow unverified SSL connections. - - .. seealso:: :ref:`security_and_authentication`. - - .. 
change:: changed - :tags: session - - :meth:`Session.reset` no longer resets the connection. It also clears - all local state and re-configures certain aspects that are cache - dependant, such as location plugins. - - .. change:: fixed - :tags: factory - - Debug logging messages using incorrect index for formatting leading to - misleading exception. - -.. release:: 0.8.4 - :date: 2015-10-08 - - .. change:: new - - Added initial support for custom attributes. - - .. seealso:: :ref:`example/custom_attribute`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.collection.CustomAttributeCollectionProxy` and - :class:`ftrack_api.attribute.CustomAttributeCollectionAttribute` to - handle custom attributes. - - .. change:: changed - :tags: collection, attribute - - ``ftrack_api.attribute.MappedCollectionAttribute`` renamed to - :class:`ftrack_api.attribute.KeyValueMappedCollectionAttribute` to more - closely reflect purpose. - - .. change:: changed - :tags: collection - - :class:`ftrack_api.collection.MappedCollectionProxy` has been refactored - as a generic base class with key, value specialisation handled in new - dedicated class - :class:`ftrack_api.collection.KeyValueMappedCollectionProxy`. This is - done to avoid confusion following introduction of new - :class:`ftrack_api.collection.CustomAttributeCollectionProxy` class. - - .. change:: fixed - :tags: events - - The event hub does not always reconnect after computer has come back - from sleep. - -.. release:: 0.8.3 - :date: 2015-09-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.4 required. - - .. change:: changed - - Updated *ftrack.server* location implementation. A server version of 3.3 - or higher is required for it to function properly. - - .. change:: fixed - - :meth:`ftrack_api.entity.factory.StandardFactory.create` not respecting - *bases* argument. - -.. release:: 0.8.2 - :date: 2015-09-16 - - .. 
change:: fixed - :tags: session - - Wrong file type set on component when publishing image sequence using - :meth:`Session.create_component`. - -.. release:: 0.8.1 - :date: 2015-09-08 - - .. change:: fixed - :tags: session - - :meth:`Session.ensure` not implemented. - -.. release:: 0.8.0 - :date: 2015-08-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.3 required. - - .. change:: new - - Added lists example. - - .. seealso:: :ref:`example/list`. - - .. change:: new - - Added convenience methods for handling timers - :class:`~ftrack_api.entity.user.User.start_timer` and - :class:`~ftrack_api.entity.user.User.stop_timer`. - - .. change:: changed - - The dynamic API classes Type, Status, Priority and - StatusType have been renamed to Type, Status, Priority and State. - - .. change:: changed - - :meth:`Session.reset` now also clears the top most level cache (by - default a :class:`~ftrack_api.cache.MemoryCache`). - - .. change:: fixed - - Some invalid server url formats not detected. - - .. change:: fixed - - Reply events not encoded correctly causing them to be misinterpreted by - the server. - -.. release:: 0.7.0 - :date: 2015-08-24 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2, < 3.3 required. - - .. change:: changed - - Removed automatic set of default statusid, priorityid and typeid on - objects as that is now either not mandatory or handled on server. - - .. change:: changed - - Updated :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_statuses` - and :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_types` to - handle custom objects. - -.. release:: 0.6.0 - :date: 2015-08-19 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.1.8, < 3.2 required. - - .. change:: changed - :tags: querying, documentation - - Updated documentation with details on new operators ``has`` and ``any`` - for querying relationships. - - .. 
seealso:: :ref:`querying/criteria/operators` - -.. release:: 0.5.2 - :date: 2015-07-29 - - .. change:: changed - :tags: server version - - ftrack server version 3.1.5 or greater required. - - .. change:: changed - - Server reported errors are now more readable and are no longer sometimes - presented as an HTML page. - -.. release:: 0.5.1 - :date: 2015-07-06 - - .. change:: changed - - Defaults computed by :class:`~ftrack_api.entity.factory.StandardFactory` - are now memoised per session to improve performance. - - .. change:: changed - - :class:`~ftrack_api.cache.Memoiser` now supports a *return_copies* - parameter to control whether deep copies should be returned when a value - was retrieved from the cache. - -.. release:: 0.5.0 - :date: 2015-07-02 - - .. change:: changed - - Now checks for server compatibility and requires an ftrack server - version of 3.1 or greater. - - .. change:: new - - Added convenience methods to :class:`~ftrack_api.query.QueryResult` to - fetch :meth:`~ftrack_api.query.QueryResult.first` or exactly - :meth:`~ftrack_api.query.QueryResult.one` result. - - .. change:: new - :tags: notes - - Added support for handling notes. - - .. seealso:: :ref:`example/note`. - - .. change:: changed - - Collection attributes generate empty collection on first access when no - remote value available. This allows interacting with a collection on a - newly created entity before committing. - - .. change:: fixed - :tags: session - - Ambiguous error raised when :class:`Session` is started with an invalid - user or key. - - .. change:: fixed - :tags: caching, session - - :meth:`Session.merge` fails against - :class:`~ftrack_api.cache.SerialisedCache` when circular reference - encountered due to entity identity not being prioritised in merge. - -.. release:: 0.4.3 - :date: 2015-06-29 - - .. change:: fixed - :tags: plugins, session, entity types - - Entity types not constructed following standard install. 
- - This is because the discovery of the default plugins is unreliable - across Python installation processes (pip, wheel etc). Instead, the - default plugins have been added as templates to the :ref:`event_list` - documentation and the - :class:`~ftrack_api.entity.factory.StandardFactory` used to create any - missing classes on :class:`Session` startup. - -.. release:: 0.4.2 - :date: 2015-06-26 - - .. change:: fixed - :tags: metadata - - Setting exact same metadata twice can cause - :exc:`~ftrack_api.exception.ImmutableAttributeError` to be incorrectly - raised. - - .. change:: fixed - :tags: session - - Calling :meth:`Session.commit` does not clear locally set attribute - values leading to immutability checks being bypassed in certain cases. - -.. release:: 0.4.1 - :date: 2015-06-25 - - .. change:: fixed - :tags: metadata - - Setting metadata twice in one session causes `KeyError`. - -.. release:: 0.4.0 - :date: 2015-06-22 - - .. change:: changed - :tags: documentation - - Documentation extensively updated. - - .. change:: new - :tags: Client review - - Added support for handling review sessions. - - .. seealso:: :ref:`Usage guide `. - - .. change:: fixed - - Metadata property not working in line with rest of system, particularly - the caching framework. - - .. change:: new - :tags: collection - - Added :class:`ftrack_api.collection.MappedCollectionProxy` class for - providing a dictionary interface to a standard - :class:`ftrack_api.collection.Collection`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.attribute.MappedCollectionAttribute` class for - describing an attribute that should use the - :class:`ftrack_api.collection.MappedCollectionProxy`. - - .. change:: new - - Entities that use composite primary keys are now fully supported in the - session, including for :meth:`Session.get` and :meth:`Session.populate`. - - .. 
change:: change - - Base :class:`ftrack_api.entity.factory.Factory` refactored to separate - out attribute instantiation into dedicated methods to make extending - simpler. - - .. change:: change - :tags: collection, attribute - - :class:`ftrack_api.attribute.DictionaryAttribute` and - :class:`ftrack_api.attribute.DictionaryAttributeCollection` removed. - They have been replaced by the new - :class:`ftrack_api.attribute.MappedCollectionAttribute` and - :class:`ftrack_api.collection.MappedCollectionProxy` respectively. - - .. change:: new - :tags: events - - :class:`Session` now supports an *auto_connect_event_hub* argument to - control whether the built in event hub should connect to the server on - session initialisation. This is useful for when only local events should - be supported or when the connection should be manually controlled. - -.. release:: 0.3.0 - :date: 2015-06-14 - - .. change:: fixed - - Session operations may be applied server side in invalid order resulting - in unexpected error. - - .. change:: fixed - - Creating and deleting an entity in single commit causes error as create - operation never persisted to server. - - Now all operations for the entity are ignored on commit when this case - is detected. - - .. change:: changed - - Internally moved from differential state to operation tracking for - determining session changes when persisting. - - .. change:: new - - ``Session.recorded_operations`` attribute for examining current - pending operations on a :class:`Session`. - - .. change:: new - - :meth:`Session.operation_recording` context manager for suspending - recording operations temporarily. Can also manually control - ``Session.record_operations`` boolean. - - .. change:: new - - Operation classes to track individual operations occurring in session. - - .. change:: new - - Public :meth:`Session.merge` method for merging arbitrary values into - the session manually. - - .. 
change:: changed - - An entity's state is now computed from the operations performed on it - and is no longer manually settable. - - .. change:: changed - - ``Entity.state`` attribute removed. Instead use the new inspection - :func:`ftrack_api.inspection.state`. - - Previously:: - - print entity.state - - Now:: - - import ftrack_api.inspection - print ftrack_api.inspection.state(entity) - - There is also an optimised inspection, - :func:`ftrack_api.inspection.states`. for determining state of many - entities at once. - - .. change:: changed - - Shallow copying a :class:`ftrack_api.symbol.Symbol` instance now - returns same instance. - -.. release:: 0.2.0 - :date: 2015-06-04 - - .. change:: changed - - Changed name of API from `ftrack` to `ftrack_api`. - - .. seealso:: :ref:`release/migration/0.2.0/new_api_name`. - - .. change:: new - :tags: caching - - Configurable caching support in :class:`Session`, including the ability - to use an external persisted cache and new cache implementations. - - .. seealso:: :ref:`caching`. - - .. change:: new - :tags: caching - - :meth:`Session.get` now tries to retrieve matching entity from - configured cache first. - - .. change:: new - :tags: serialisation, caching - - :meth:`Session.encode` supports a new mode *persisted_only* that will - only encode persisted attribute values. - - .. change:: changed - - Session.merge method is now private (:meth:`Session._merge`) until it is - qualified for general usage. - - .. change:: changed - :tags: entity state - - :class:`~ftrack_api.entity.base.Entity` state now managed on the entity - directly rather than stored separately in the :class:`Session`. - - Previously:: - - session.set_state(entity, state) - print session.get_state(entity) - - Now:: - - entity.state = state - print entity.state - - .. change:: changed - :tags: entity state - - Entity states are now :class:`ftrack_api.symbol.Symbol` instances rather - than strings. 
- - Previously:: - - entity.state = 'created' - - Now:: - - entity.state = ftrack_api.symbol.CREATED - - .. change:: fixed - :tags: entity state - - It is now valid to transition from most entity states to an - :attr:`ftrack_api.symbol.NOT_SET` state. - - .. change:: changed - :tags: caching - - :class:`~ftrack_api.cache.EntityKeyMaker` removed and replaced by - :class:`~ftrack_api.cache.StringKeyMaker`. Entity identity now - computed separately and passed to key maker to allow key maker to work - with non entity instances. - - .. change:: fixed - :tags: entity - - Internal data keys ignored when re/constructing entities reducing - distracting and irrelevant warnings in logs. - - .. change:: fixed - :tags: entity - - :class:`~ftrack_api.entity.base.Entity` equality test raises error when - other is not an entity instance. - - .. change:: changed - :tags: entity, caching - - :meth:`~ftrack_api.entity.base.Entity.merge` now also merges state and - local attributes. In addition, it ensures values being merged have also - been merged into the session and outputs more log messages. - - .. change:: fixed - :tags: inspection - - :func:`ftrack_api.inspection.identity` returns different result for same - entity depending on whether entity type is unicode or string. - - .. change:: fixed - - :func:`ftrack_api.mixin` causes method resolution failure when same - class mixed in multiple times. - - .. change:: changed - - Representations of objects now show plain id rather than converting to - hex. - - .. change:: fixed - :tags: events - - Event hub raises TypeError when listening to ftrack.update events. - - .. change:: fixed - :tags: events - - :meth:`ftrack_api.event.hub.EventHub.subscribe` fails when subscription - argument contains special characters such as `@` or `+`. - - .. change:: fixed - :tags: collection - - :meth:`ftrack_api.collection.Collection` incorrectly modifies entity - state on initialisation. - -.. release:: 0.1.0 - :date: 2015-03-25 - - .. 
change:: changed - - Moved standardised construct entity type logic to core package (as part - of the :class:`~ftrack_api.entity.factory.StandardFactory`) for easier - reuse and extension. - -.. release:: 0.1.0-beta.2 - :date: 2015-03-17 - - .. change:: new - :tags: locations - - Support for ftrack.server location. The corresponding server build is - required for it to function properly. - - .. change:: new - :tags: locations - - Support for managing components in locations has been added. Check out - the :ref:`dedicated tutorial `. - - .. change:: new - - A new inspection API (:mod:`ftrack_api.inspection`) has been added for - extracting useful information from objects in the system, such as the - identity of an entity. - - .. change:: changed - - ``Entity.primary_key`` and ``Entity.identity`` have been removed. - Instead, use the new :func:`ftrack_api.inspection.primary_key` and - :func:`ftrack_api.inspection.identity` functions. This was done to make it - clearer that the extracted information is determined from the current - entity state and modifying the returned object will have no effect on - the entity instance itself. - - .. change:: changed - - :func:`ftrack_api.inspection.primary_key` now returns a mapping of the - attribute names and values that make up the primary key, rather than - the previous behaviour of returning a tuple of just the values. To - emulate previous behaviour do:: - - ftrack_api.inspection.primary_key(entity).values() - - .. change:: changed - - :meth:`Session.encode` now supports different strategies for encoding - entities via the *entity_attribute_strategy* keyword argument. This makes - it possible to use this method for general serialisation of entity - instances. - - .. change:: changed - - Encoded referenced entities are now a mapping containing - *__entity_type__* and then each key, value pair that makes up the - entity's primary key. 
For example:: - - { - '__entity_type__': 'User', - 'id': '8b90a444-4e65-11e1-a500-f23c91df25eb' - } - - .. change:: changed - - :meth:`Session.decode` no longer automatically adds decoded entities to - the :class:`Session` cache making it possible to use decode - independently. - - .. change:: new - - Added :meth:`Session.merge` for merging entities recursively into the - session cache. - - .. change:: fixed - - Replacing an entity in a :class:`ftrack_api.collection.Collection` with an - identical entity no longer raises - :exc:`ftrack_api.exception.DuplicateItemInCollectionError`. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py deleted file mode 100644 index 5fda0195a95..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. - if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - # Perform your logic here, such as subscribe to an event. 
- pass - - logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py deleted file mode 100644 index dd11136d69b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py +++ /dev/null @@ -1,37 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register_with_session_ready(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.example.example-plugin') - logger.debug('Session ready.') - session = event['data']['session'] - - # Session is now ready and can be used to e.g. query objects. - task = session.query('Task').first() - print task['name'] - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. 
- if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - register_with_session_ready - ) - - logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst deleted file mode 100644 index 724afa81a64..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _security_and_authentication: - -*************************** -Security and authentication -*************************** - -Self signed SSL certificate -=========================== - -When using a self signed SSL certificate the API may fail to connect if it -cannot verify the SSL certificate. Under the hood the -`requests `_ library is used and it -must be specified where the trusted certificate authority can be found using the -environment variable ``REQUESTS_CA_BUNDLE``. - -.. seealso:: `SSL Cert Verification `_ - -InsecurePlatformWarning -======================= - -When using this API you may sometimes see a warning:: - - InsecurePlatformWarning: A true SSLContext object is not available. This - prevents urllib3 from configuring SSL appropriately and may cause certain - SSL connections to fail. - -If you encounter this warning, its recommended you upgrade to Python 2.7.9, or -use pyOpenSSL. To use pyOpenSSL simply:: - - pip install pyopenssl ndg-httpsclient pyasn1 - -and the `requests `_ library used by -this API will use pyOpenSSL instead. - -.. 
seealso:: `InsecurePlatformWarning `_ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst deleted file mode 100644 index 73b352eb2f6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst +++ /dev/null @@ -1,156 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _tutorial: - -******** -Tutorial -******** - -.. currentmodule:: ftrack_api.session - -This tutorial provides a quick dive into using the API and the broad stroke -concepts involved. - -First make sure the ftrack Python API is :ref:`installed `. - -Then start a Python session and import the ftrack API:: - - >>> import ftrack_api - -The API uses :ref:`sessions ` to manage communication -with an ftrack server. Create a session that connects to your ftrack server -(changing the passed values as appropriate):: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -.. note:: - - A session can use :ref:`environment variables - ` to configure itself. - -Now print a list of the available entity types retrieved from the server:: - - >>> print session.types.keys() - [u'TypedContext', u'ObjectType', u'Priority', u'Project', u'Sequence', - u'Shot', u'Task', u'Status', u'Type', u'Timelog', u'User'] - -Now the list of possible entity types is known, :ref:`query ` the -server to retrieve entities of a particular type by using the -:meth:`Session.query` method:: - - >>> projects = session.query('Project') - -Each project retrieved will be an :ref:`entity ` instance -that behaves much like a standard Python dictionary. 
For example, to find out -the available keys for an entity, call the -:meth:`~ftrack_api.entity.Entity.keys` method:: - - >>> print projects[0].keys() - [u'status', u'is_global', u'name', u'end_date', u'context_type', - u'id', u'full_name', u'root', u'start_date'] - -Now, iterate over the retrieved entities and print each ones name:: - - >>> for project in projects: - ... print project['name'] - test - client_review - tdb - man_test - ftrack - bunny - -.. note:: - - Many attributes for retrieved entities are loaded on demand when the - attribute is first accessed. Doing this lots of times in a script can be - inefficient, so it is worth using :ref:`projections ` - in queries or :ref:`pre-populating ` - entities where appropriate. You can also :ref:`customise default projections - ` to help others - pre-load common attributes. - -To narrow a search, add :ref:`criteria ` to the query:: - - >>> active_projects = session.query('Project where status is active') - -Combine criteria for more powerful queries:: - - >>> import arrow - >>> - >>> active_projects_ending_before_next_week = session.query( - ... 'Project where status is active and end_date before "{0}"' - ... .format(arrow.now().replace(weeks=+1)) - ... ) - -Some attributes on an entity will refer to another entity or collection of -entities, such as *children* on a *Project* being a collection of *Context* -entities that have the project as their parent:: - - >>> project = session.query('Project').first() - >>> print project['children'] - - -And on each *Context* there is a corresponding *parent* attribute which is a -link back to the parent:: - - >>> child = project['children'][0] - >>> print child['parent'] is project - True - -These relationships can also be used in the criteria for a query:: - - >>> results = session.query( - ... 'Context where parent.name like "te%"' - ... ) - -To create new entities in the system use :meth:`Session.create`:: - - >>> new_sequence = session.create('Sequence', { - ... 
'name': 'Starlord Reveal' - ... }) - -The created entity is not yet persisted to the server, but it is still possible -to modify it. - - >>> new_sequence['description'] = 'First hero character reveal.' - -The sequence also needs a parent. This can be done in one of two ways: - -* Set the parent attribute on the sequence:: - - >>> new_sequence['parent'] = project - -* Add the sequence to a parent's children attribute:: - - >>> project['children'].append(new_sequence) - -When ready, persist to the server using :meth:`Session.commit`:: - - >>> session.commit() - -When finished with a :class:`Session`, it is important to :meth:`~Session.close` -it in order to release resources and properly unsubscribe any registered event -listeners. It is also possible to use the session as a context manager in order -to have it closed automatically after use:: - - >>> with ftrack_api.Session() as session: - ... print session.query('User').first() - - >>> print session.closed - True - -Once a :class:`Session` is closed, any operations that attempt to use the closed -connection to the ftrack server will fail:: - - >>> session.query('Project').first() - ConnectionClosedError: Connection closed. - -Continue to the next section to start learning more about the API in greater -depth or jump over to the :ref:`usage examples ` if you prefer to learn -by example. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst deleted file mode 100644 index e3602c4fa9d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst +++ /dev/null @@ -1,281 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _understanding_sessions: - -********************** -Understanding sessions -********************** - -.. 
currentmodule:: ftrack_api.session - -All communication with an ftrack server takes place through a :class:`Session`. -This allows more opportunity for configuring the connection, plugins etc. and -also makes it possible to connect to multiple ftrack servers from within the -same Python process. - -.. _understanding_sessions/connection: - -Connection -========== - -A session can be manually configured at runtime to connect to a server with -certain credentials:: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -Alternatively, a session can use the following environment variables to -configure itself: - - * :envvar:`FTRACK_SERVER` - * :envvar:`FTRACK_API_USER` - * :envvar:`FTRACK_API_KEY` - -When using environment variables, no server connection arguments need to be -passed manually:: - - >>> session = ftrack_api.Session() - -.. _understanding_sessions/unit_of_work: - -Unit of work -============ - -Each session follows the unit of work pattern. This means that many of the -operations performed using a session will happen locally and only be persisted -to the server at certain times, notably when calling :meth:`Session.commit`. -This approach helps optimise calls to the server and also group related logic -together in a transaction:: - - user = session.create('User', {}) - user['username'] = 'martin' - other_user = session.create('User', {'username': 'bjorn'}) - other_user['email'] = 'bjorn@example.com' - -Behind the scenes a series of :class:`operations -` are recorded reflecting the changes made. You -can take a peek at these operations if desired by examining the -``Session.recorded_operations`` property:: - - >>> for operation in session.recorded_operations: - ... print operation - - - - - -Calling :meth:`Session.commit` persists all recorded operations to the server -and clears the operation log:: - - session.commit() - -.. 
note:: - - The commit call will optimise operations to be as efficient as possible - without breaking logical ordering. For example, a create followed by updates - on the same entity will be compressed into a single create. - - Queries are special and always issued on demand. As a result, a query may return - unexpected results if the relevant local changes have not yet been sent to the - server:: - - >>> user = session.create('User', {'username': 'some_unique_username'}) - >>> query = 'User where username is "{0}"'.format(user['username']) - >>> print len(session.query(query)) - 0 - >>> session.commit() - >>> print len(session.query(query)) - 1 - -Where possible, query results are merged in with existing data transparently -with any local changes preserved:: - - >>> user = session.query('User').first() - >>> user['email'] = 'me@example.com' # Not yet committed to server. - >>> retrieved = session.query( - ... 'User where id is "{0}"'.format(user['id']) - ... ).one() - >>> print retrieved['email'] # Displays locally set value. - 'me@example.com' - >>> print retrieved is user - True - -This is possible due to the smart :ref:`caching` layer in the session. - -.. _understanding_sessions/auto_population: - -Auto-population -=============== - -Another important concept in a session is that of auto-population. By default a -session is configured to auto-populate missing attribute values on access. This -means that the first time you access an attribute on an entity instance a query -will be sent to the server to fetch the value:: - - user = session.query('User').first() - # The next command will issue a request to the server to fetch the - # 'username' value on demand as this is the first time it is accessed. - print user['username'] - -Once a value has been retrieved it is :ref:`cached ` locally in the -session and accessing it again will not issue more server calls:: - - # On second access no server call is made. 
- print user['username'] - -You can control the auto population behaviour of a session by either changing -the ``Session.auto_populate`` attribute on a session or using the provided -context helper :meth:`Session.auto_populating` to temporarily change the -setting. When turned off you may see a special -:attr:`~ftrack_api.symbol.NOT_SET` symbol that represents a value has not yet -been fetched:: - - >>> with session.auto_populating(False): - ... print user['email'] - NOT_SET - -Whilst convenient for simple scripts, making many requests to the server for -each attribute can slow execution of a script. To support optimisation the API -includes methods for batch fetching attributes. Read about them in -:ref:`querying/projections` and :ref:`working_with_entities/populating`. - -.. _understanding_sessions/entity_types: - -Entity types -============ - -When a session has successfully connected to the server it will automatically -download schema information and :ref:`create appropriate classes -` for use. This is important as different -servers can support different entity types and configurations. - -This information is readily available and useful if you need to check that the -entity types you expect are present. Here's how to print a list of all entity -types registered for use in the current API session:: - - >>> print session.types.keys() - [u'Task', u'Shot', u'TypedContext', u'Sequence', u'Priority', - u'Status', u'Project', u'User', u'Type', u'ObjectType'] - -Each entity type is backed by a :ref:`customisable class -` that further describes the entity type and -the attributes that are available. - -.. hint:: - - If you need to use an :func:`isinstance` check, always go through the - session as the classes are built dynamically:: - - >>> isinstance(entity, session.types['Project']) - -.. 
_understanding_sessions/plugins: - -Configuring plugins -=================== - -Plugins are used by the API to extend it with new functionality, such as -:term:`locations ` or adding convenience methods to -:ref:`understanding_sessions/entity_types`. In addition to new API -functionality, event plugins may also be used for event processing by listening -to :ref:`ftrack update events ` or adding custom functionality to ftrack by registering -:term:`actions `. - - -When starting a new :class:`Session` either pass the *plugins_paths* to search -explicitly or rely on the environment variable -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. As each session is independent of others, -you can configure plugins per session. - -The paths will be searched for :term:`plugins `, python files -which expose a `register` function. These functions will be evaluated and can -be used extend the API with new functionality, such as locations or actions. - -If you do not specify any override then the session will attempt to discover and -use the default plugins. - -Plugins are discovered using :func:`ftrack_api.plugin.discover` with the -session instance passed as the sole positional argument. Most plugins should -take the form of a mount function that then subscribes to specific :ref:`events -` on the session:: - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - # Find location(s) and customise instances. - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -Additional keyword arguments can be passed as *plugin_arguments* to the -:class:`Session` on instantiation. These are passed to the plugin register -function if its signature supports them:: - - # a_plugin.py - def register(session, reticulate_splines=False): - '''Register plugin with *session*.''' - ... 
- - # main.py - session = ftrack_api.Session( - plugin_arguments={ - 'reticulate_splines': True, - 'some_other_argument': 42 - } - ) - -.. seealso:: - - Lists of events which you can subscribe to in your plugins are available - both for :ref:`synchronous event published by the python API ` - and :ref:`asynchronous events published by the server ` - - -Quick setup ------------ - -1. Create a directory where plugins will be stored. Place any plugins you want -loaded automatically in an API *session* here. - -.. image:: /image/configuring_plugins_directory.png - -2. Configure the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. - - -Detailed setup --------------- - -Start out by creating a directory on your machine where you will store your -plugins. Download :download:`example_plugin.py ` -and place it in the directory. - -Open up a terminal window, and ensure that plugin is picked up when -instantiating the session and manually setting the *plugin_paths*:: - - >>> # Set up basic logging - >>> import logging - >>> logging.basicConfig() - >>> plugin_logger = logging.getLogger('com.example.example-plugin') - >>> plugin_logger.setLevel(logging.DEBUG) - >>> - >>> # Configure the API, loading plugins in the specified paths. - >>> import ftrack_api - >>> plugin_paths = ['/path/to/plugins'] - >>> session = ftrack_api.Session(plugin_paths=plugin_paths) - -If everything is working as expected, you should see the following in the -output:: - - DEBUG:com.example.example-plugin:Plugin registered - -Instead of specifying the plugin paths when instantiating the session, you can -also specify the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. -To specify multiple directories, use the path separator for your operating -system. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst deleted file mode 100644 index 2d9d26f986f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst +++ /dev/null @@ -1,434 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _working_with_entities: - -********************* -Working with entities -********************* - -.. currentmodule:: ftrack_api.session - -:class:`Entity ` instances are Python dict-like -objects whose keys correspond to attributes for that type in the system. They -may also provide helper methods to perform common operations such as replying to -a note:: - - note = session.query('Note').first() - print note.keys() - print note['content'] - note['content'] = 'A different message!' - reply = note.create_reply(...) - -.. _working_with_entities/attributes: - -Attributes -========== - -Each entity instance is typed according to its underlying entity type on the -server and configured with appropriate attributes. For example, a *task* will be -represented by a *Task* class and have corresponding attributes. You can -:ref:`customise entity classes ` to alter -attribute access or provide your own helper methods. - -To see the available attribute names on an entity use the -:meth:`~ftrack_api.entity.base.Entity.keys` method on the instance:: - - >>> task = session.query('Task').first() - >>> print task.keys() - ['id', 'name', ...] - -If you need more information about the type of attribute, examine the -``attributes`` property on the corresponding class:: - - >>> for attribute in type(task).attributes: - ... print attribute - - - - - - ... 
- -Notice that there are different types of attribute such as -:class:`~ftrack_api.attribute.ScalarAttribute` for plain values or -:class:`~ftrack_api.attribute.ReferenceAttribute` for relationships. These -different types are reflected in the behaviour on the entity instance when -accessing a particular attribute by key: - - >>> # Scalar - >>> print task['name'] - 'model' - >>> task['name'] = 'comp' - - >>> # Single reference - >>> print task['status'] - - >>> new_status = session.query('Status').first() - >>> task['status'] = new_status - - >>> # Collection - >>> print task['timelogs'] - - >>> print task['timelogs'][:] - [, ...] - >>> new_timelog = session.create('Timelog', {...}) - >>> task['timelogs'].append(new_timelog) - -.. _working_with_entities/attributes/bidirectional: - -Bi-directional relationships ----------------------------- - -Some attributes refer to different sides of a bi-directional relationship. In -the current version of the API bi-directional updates are not propagated -automatically to the other side of the relationship. For example, setting a -*parent* will not update the parent entity's *children* collection locally. -There are plans to support this behaviour better in the future. For now, after -commit, :ref:`populate ` the reverse side -attribute manually. - -.. _working_with_entities/creating: - -Creating entities -================= - -In order to create a new instance of an entity call :meth:`Session.create` -passing in the entity type to create and any initial attribute values:: - - new_user = session.create('User', {'username': 'martin'}) - -If there are any default values that can be set client side then they will be -applied at this point. Typically this will be the unique entity key:: - - >>> print new_user['id'] - 170f02a4-6656-4f15-a5cb-c4dd77ce0540 - -At this point no information has been sent to the server. 
However, you are free -to continue :ref:`updating ` this object -locally until you are ready to persist the changes by calling -:meth:`Session.commit`. - -If you are wondering about what would happen if you accessed an unset attribute -on a newly created entity, go ahead and give it a go:: - - >>> print new_user['first_name'] - NOT_SET - -The session knows that it is a newly created entity that has not yet been -persisted so it doesn't try to fetch any attributes on access even when -``session.auto_populate`` is turned on. - -.. _working_with_entities/updating: - -Updating entities -================= - -Updating an entity is as simple as modifying the values for specific keys on -the dict-like instance and calling :meth:`Session.commit` when ready. The entity -to update can either be a new entity or a retrieved entity:: - - task = session.query('Task').first() - task['bid'] = 8 - -Remember that, for existing entities, accessing an attribute will load it from -the server automatically. If you are interested in just setting values without -first fetching them from the server, turn :ref:`auto-population -` off temporarily:: - - >>> with session.auto_populating(False): - ... task = session.query('Task').first() - ... task['bid'] = 8 - - -.. _working_with_entities/resetting: - -Server side reset of entity attributes or settings. -=========================== - -Some entities support resetting of attributes, for example -to reset a users api key:: - - - session.reset_remote( - 'api_key', entity=session.query('User where username is "test_user"').one() - ) - -.. note:: - Currently the only attribute possible to reset is 'api_key' on - the user entity type. - - -.. _working_with_entities/deleting: - -Deleting entities -================= - -To delete an entity you need an instance of the entity in your session (either -from having created one or retrieving one). 
Then call :meth:`Session.delete` on -the entity and :meth:`Session.commit` when ready:: - - task_to_delete = session.query('Task').first() - session.delete(task_to_delete) - ... - session.commit() - -.. note:: - - Even though the entity is deleted, you will still have access to the local - instance and any local data stored on that instance whilst that instance - remains in memory. - -Keep in mind that some deletions, when propagated to the server, will cause -other entities to be deleted also, so you don't have to worry about deleting an -entire hierarchy manually. For example, deleting a *Task* will also delete all -*Notes* on that task. - -.. _working_with_entities/populating: - -Populating entities -=================== - -When an entity is retrieved via :meth:`Session.query` or :meth:`Session.get` it -will have some attributes prepopulated. The rest are dynamically loaded when -they are accessed. If you need to access many attributes it can be more -efficient to request all those attributes be loaded in one go. One way to do -this is to use a :ref:`projections ` in queries. - -However, if you have entities that have been passed to you from elsewhere you -don't have control over the query that was issued to get those entities. In this -case you can you can populate those entities in one go using -:meth:`Session.populate` which works exactly like :ref:`projections -` in queries do, but operating against known entities:: - - >>> users = session.query('User') - >>> session.populate(users, 'first_name, last_name') - >>> with session.auto_populating(False): # Turn off for example purpose. - ... for user in users: - ... print 'Name: {0}'.format(user['first_name']) - ... print 'Email: {0}'.format(user['email']) - Name: Martin - Email: NOT_SET - ... - -.. note:: - - You can populate a single or many entities in one call so long as they are - all the same entity type. - -.. 
_working_with_entities/entity_states: - -Entity states -============= - -Operations on entities are :ref:`recorded in the session -` as they happen. At any time you can -inspect an entity to determine its current state from those pending operations. - -To do this, use :func:`ftrack_api.inspection.state`:: - - >>> import ftrack_api.inspection - >>> new_user = session.create('User', {}) - >>> print ftrack_api.inspection.state(new_user) - CREATED - >>> existing_user = session.query('User').first() - >>> print ftrack_api.inspection.state(existing_user) - NOT_SET - >>> existing_user['email'] = 'martin@example.com' - >>> print ftrack_api.inspection.state(existing_user) - MODIFIED - >>> session.delete(new_user) - >>> print ftrack_api.inspection.state(new_user) - DELETED - -.. _working_with_entities/entity_types: - -Customising entity types -======================== - -Each type of entity in the system is represented in the Python client by a -dedicated class. However, because the types of entities can vary these classes -are built on demand using schema information retrieved from the server. - -Many of the default classes provide additional helper methods which are mixed -into the generated class at runtime when a session is started. - -In some cases it can be useful to tailor the custom classes to your own pipeline -workflows. Perhaps you want to add more helper functions, change attribute -access rules or even providing a layer of backwards compatibility for existing -code. The Python client was built with this in mind and makes such -customisations as easy as possible. - -When a :class:`Session` is constructed it fetches schema details from the -connected server and then calls an :class:`Entity factory -` to create classes from those schemas. It -does this by emitting a synchronous event, -*ftrack.api.session.construct-entity-type*, for each schema and expecting a -*class* object to be returned. 
- -In the default setup, a :download:`construct_entity_type.py -<../resource/plugin/construct_entity_type.py>` plugin is placed on the -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. This plugin will register a trivial subclass -of :class:`ftrack_api.entity.factory.StandardFactory` to create the classes in -response to the construct event. The simplest way to get started is to edit this -default plugin as required. - -.. seealso:: :ref:`understanding_sessions/plugins` - -.. _working_with_entities/entity_types/default_projections: - -Default projections -------------------- - -When a :ref:`query ` is issued without any :ref:`projections -`, the session will automatically add default projections -according to the type of the entity. - -For example, the following shows that for a *User*, only *id* is fetched by -default when no projections added to the query:: - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. note:: - - These default projections are also used when you access a relationship - attribute using the dictionary key syntax. - -If you want to default to fetching *username* for a *Task* as well then you can -change the default_projections* in your class factory plugin:: - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.default_projections = ['id', 'username'] - - return cls - -Now a projection-less query will also query *username* by default: - -.. 
note:: - - You will need to start a new session to pick up the change you made:: - - session = ftrack_api.Session() - -.. code-block:: python - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', u'martin'), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -Note that if any specific projections are applied in a query, those override -the default projections entirely. This allows you to also *reduce* the data -loaded on demand:: - - >>> session = ftrack_api.Session() # Start new session to avoid cache. - >>> user = session.query('select id from User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. _working_with_entities/entity_types/helper_methods: - -Helper methods --------------- - -If you want to add additional helper methods to the constructed classes to -better support your pipeline logic, then you can simply patch the created -classes in your factory, much like with changing the default projections:: - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.get_full_name = get_full_name - - return cls - -Now you have a new helper method *get_full_name* on your *User* entities:: - - >>> session = ftrack_api.Session() # New session to pick up changes. 
- >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -If you'd rather not patch the existing classes, or perhaps have a lot of helpers -to mixin, you can instead inject your own class as the base class. The only -requirement is that it has the base :class:`~ftrack_api.entity.base.Entity` -class in its ancestor classes:: - - import ftrack_api.entity.base - - - class CustomUser(ftrack_api.entity.base.Entity): - '''Represent user.''' - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Alter base class for constructed class. - if bases is None: - bases = [ftrack_api.entity.base.Entity] - - if schema['id'] == 'User': - bases = [CustomUser] - - cls = super(Factory, self).create(schema, bases=bases) - return cls - -The resulting effect is the same:: - - >>> session = ftrack_api.Session() # New session to pick up changes. - >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -.. note:: - - Your custom class is not the leaf class which will still be a dynamically - generated class. Instead your custom class becomes the base for the leaf - class:: - - >>> print type(user).__mro__ - (, , ...) 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini deleted file mode 100644 index b1f515ee18e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -minversion = 2.4.2 -addopts = -v -k-slow --junitxml=test-reports/junit.xml --cache-clear -norecursedirs = .* _* -python_files = test_*.py -python_functions = test_* -mock_use_standalone_module = true \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py deleted file mode 100644 index 0682a5eeb0e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py +++ /dev/null @@ -1,39 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api -import ftrack_api.entity.location -import ftrack_api.accessor.disk - - -def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - # - # location = session.query('Location where name is "my.location"').one() - # ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - # location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py deleted file mode 100644 index 45f78416708..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py +++ /dev/null @@ -1,46 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api.entity.factory - - -class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Optionally change bases for class to be generated. - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - - return cls - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - factory = Factory() - - def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] - return factory.create(schema) - - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg deleted file mode 100644 index b2ad8fd0861..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg +++ /dev/null @@ -1,6 +0,0 @@ -[build_sphinx] -config-dir = doc -source-dir = doc -build-dir = build/doc -builder = html -all_files = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py deleted file mode 100644 index da99a572b4c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os -import re - -from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand - - -ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) -RESOURCE_PATH = os.path.join(ROOT_PATH, 'resource') -SOURCE_PATH = os.path.join(ROOT_PATH, 'source') -README_PATH = os.path.join(ROOT_PATH, 'README.rst') - - -# Read version from source. -with open( - os.path.join(SOURCE_PATH, 'ftrack_api', '_version.py') -) as _version_file: - VERSION = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - - -# Custom commands. 
-class PyTest(TestCommand): - '''Pytest command.''' - - def finalize_options(self): - '''Finalize options to be used.''' - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - '''Import pytest and run.''' - import pytest - raise SystemExit(pytest.main(self.test_args)) - - -# Call main setup. -setup( - name='ftrack-python-api', - version=VERSION, - description='Python API for ftrack.', - long_description=open(README_PATH).read(), - keywords='ftrack, python, api', - url='https://bitbucket.org/ftrack/ftrack-python-api', - author='ftrack', - author_email='support@ftrack.com', - license='Apache License (2.0)', - packages=find_packages(SOURCE_PATH), - package_dir={ - '': 'source' - }, - setup_requires=[ - 'sphinx >= 1.2.2, < 2', - 'sphinx_rtd_theme >= 0.1.6, < 1', - 'lowdown >= 0.1.0, < 2' - ], - install_requires=[ - 'requests >= 2, <3', - 'arrow >= 0.4.4, < 1', - 'termcolor >= 1.1.0, < 2', - 'pyparsing >= 2.0, < 3', - 'clique >= 1.2.0, < 2', - 'websocket-client >= 0.40.0, < 1' - ], - tests_require=[ - 'pytest >= 2.7, < 3', - 'pytest-mock >= 0.4, < 1', - 'pytest-catchlog >= 1, <=2' - ], - cmdclass={ - 'test': PyTest - }, - zip_safe=False, - python_requires=">=2.7.9, <3.0" - -) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py deleted file mode 100644 index 34833aa0dd6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from ftrack_api import * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py deleted file mode 100644 index d8ee30bd8f7..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from ._version import __version__ -from .session import Session - - -def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. - - *name* can be used to specify new class name. If not specified then one will - be generated. - - ''' - if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) - - # Check mixin class not already present in mro in order to avoid consistent - # method resolution failure. - if mixin_class in instance.__class__.mro(): - return - - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py deleted file mode 100644 index fbe14f32772..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py +++ /dev/null @@ -1,656 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -from __future__ import absolute_import - -import logging -import json -import sys -import os - -import ftrack_api -import ftrack_api.structure.standard as _standard -from ftrack_api.logging import LazyLogMessage as L - - -scenario_name = 'ftrack.centralized-storage' - - -class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - @property - def storage_scenario(self): - '''Return storage scenario setting.''' - return self.session.query( - 'select value from Setting ' - 'where name is "storage_scenario" and group is "STORAGE"' - ).one() - - @property - def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' - storage_scenario = self.storage_scenario - - try: - configuration = json.loads(storage_scenario['value']) - except (ValueError, TypeError): - return None - - if not isinstance(configuration, dict): - return None - - if configuration.get('scenario') != scenario_name: - return None - - return configuration.get('data', {}) - - def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') - - if configure_location: - location_text = unicode( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' - ).format(**configure_location) - else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) - ) - - mount_points_text = unicode( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' - ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' - ) - - mount_points_not_set = [] - - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') - - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') - - if not 
select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') - - if mount_points_not_set: - mount_points_text += unicode( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' - ).format( - missing=' and '.join(mount_points_not_set) - ) - - text = unicode( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) - - return text - - def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' - steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' - ) - - warning_message = '' - values = event['data'].get('values', {}) - - # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') - next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) - - if 'configuration' in values: - configuration = values.pop('configuration') - else: - configuration = {} - - if values: - # Update configuration with values from the previous step. - configuration[previous_step] = values - - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': - location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) - ).first() - if not location_exists: - next_step = 'select_location' - warning_message = ( - '**The selected location does not exist. 
Please choose ' - 'one from the dropdown or create a new one.**' - ) - - if next_step == 'select_location': - try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) - except (KeyError, TypeError): - location_id = None - - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] - for location in self.session.query( - 'select name, label, description from Location' - ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' - ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) - - warning = '' - if location_id is not None: - # If there is already a location configured we must make the - # user aware that changing the location may be problematic. - warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' - ) - default_value = location_id - elif location_id is None and len(options) == 1: - # No location configured and no existing locations to use. - default_value = 'create_new_location' - else: - # There are existing locations to choose from but non of them - # are currently active in the centralized storage scenario. - default_value = None - - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. 
{0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' - default_location_description = ( - 'The studio central location where all components are ' - 'stored.' - ) - - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) - - if configure_location: - try: - existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') - ) - ).first() - except UnicodeEncodeError: - next_step = 'configure_location' - warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' - ) - values = configuration['select_location'] - else: - if existing_location: - next_step = 'configure_location' - warning_message += ( - u'**There is already a location named {0}. ' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') - ) - ) - values = configuration['select_location'] - - if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') - ): - next_step = 'configure_location' - warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' - ) - values = configuration['select_location'] - - if next_step == 'configure_location': - # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': - # Add options to create a new location. 
- items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' - ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] - - else: - # The user selected an existing location. Move on to next - # step. - next_step = 'select_mount_point' - - if next_step == 'select_structure': - # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' - # items = [ - # { - # 'type': 'label', - # 'value': ( - # '#Select structure#\n' - # 'Select which structure to use with your location. ' - # 'The structure is used to generate the filesystem ' - # 'path for components that are added to this location.' - # ) - # }, - # { - # 'type': 'enumerator', - # 'label': 'Structure', - # 'name': 'structure_id', - # 'value': 'standard', - # 'data': [{ - # 'label': 'Standard', - # 'value': 'standard', - # 'description': ( - # 'The Standard structure uses the names in your ' - # 'project structure to determine the path.' - # ) - # }] - # } - # ] - - if next_step == 'select_mount_point': - try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) - except (KeyError, TypeError): - mount_points = dict() - - items = [ - { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. 
For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' - ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. \\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } - ] - - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' - - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] - - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] - location = self.session.create( - 'Location', - { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } - ) - - else: - location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) - ).one() - - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } - } - }) - - self.storage_scenario['value'] = setting_value - self.session.commit() - - # Broadcast an event that storage scenario has been configured. 
- event = ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.configure-done' - ) - self.session.event_hub.publish(event) - - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' - - if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) - - return { - 'items': items, - 'state': state - } - - def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' - return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) - } - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - #: TODO: Move these to a separate function. - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.discover ' - 'and source.user.username="{0}"' - ).format( - session.api_user - ), - self.discover_centralized_scenario - ) - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.configure ' - 'and data.scenario_id="{0}" ' - 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario - ) - - -class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] - - try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] - - except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) - self.logger.error(L(error_message)) - raise ftrack_api.exception.LocationError( - 'Unable to configure location based on scenario.' - ) - - else: - location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True - ) - - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - else: - raise ftrack_api.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) - ) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=prefix - ) - location.structure = _standard.StandardStructure() - location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) - - def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] - - prefix = None - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - - if not prefix: - return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' 
- ) - - if not os.path.isdir(prefix): - return ( - unicode( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - session.event_hub.subscribe( - ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self.activate - ) - - # Listen to verify startup event from ftrack connect to allow responding - # with a message if something is not working correctly with this - # scenario that the user should be notified about. - self.session.event_hub.subscribe( - ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self._verify_startup - ) - -def register(session): - '''Register storage scenario.''' - scenario = ActivateCentralizedStorageScenario() - scenario.register(session) - - -def register_configuration(session): - '''Register storage scenario.''' - scenario = ConfigureCentralizedStorageScenario() - scenario.register(session) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py deleted file mode 100644 index 9f79a1850ce..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py +++ /dev/null @@ -1,534 +0,0 @@ -# pragma: no cover -# Module 'ntpath' -- common operations on WinNT/Win95 pathnames -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. 
-""" - -import os -import sys -import stat -import genericpath -import warnings - -from genericpath import * - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all slashes into backslashes.""" - return s.replace("/", "\\").lower() - - -# Return whether a path is absolute. -# Trivial in Posix, harder on the Mac or MS-DOS. -# For DOS it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume letter and colon / UNC resource -# starts with a slash or backslash. - -def isabs(s): - """Test whether a path is absolute""" - s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting "\\" as needed. - If any component is an absolute path, all previous path components - will be discarded.""" - path = a - for b in p: - b_wins = 0 # set to 1 iff b makes path irrelevant - if path == "": - b_wins = 1 - - elif isabs(b): - # This probably wipes out path so far. 
However, it's more - # complicated if path begins with a drive letter: - # 1. join('c:', '/a') == 'c:/a' - # 2. join('c:/', '/a') == 'c:/a' - # But - # 3. join('c:/a', '/b') == '/b' - # 4. join('c:', 'd:/') = 'd:/' - # 5. join('c:/', 'd:/') = 'd:/' - if path[1:2] != ":" or b[1:2] == ":": - # Path doesn't start with a drive letter, or cases 4 and 5. - b_wins = 1 - - # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): - # case 3 - b_wins = 1 - - if b_wins: - path = b - else: - # Join, and ensure there's a separator. - assert len(path) > 0 - if path[-1] in "/\\": - if b and b[0] in "/\\": - path += b[1:] - else: - path += b - elif path[-1] == ":": - path += b - elif b: - if b[0] in "/\\": - path += b - else: - path += "\\" + b - else: - # path is not empty and does not end with a backslash, - # but b is empty; since, e.g., split('a/') produces - # ('a', ''), it's best if join() adds a backslash in - # this case. - path += '\\' - - return path - - -# Split a path in a drive specification (a drive letter followed by a -# colon) and the path specification. -# It is always true that drivespec + pathspec == p -def splitdrive(p): - """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': - return p[0:2], p[2:] - return '', p - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... 
- # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('\\', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('\\', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Split a path in head (everything up to the last '/') and tail (the -# rest). After the trailing '/' is stripped, the invariant -# join(head, tail) == p holds. -# The resulting head won't end in '/' unless it is the root. - -def split(p): - """Split a pathname. - - Return tuple (head, tail) where tail is everything after the final slash. - Either part may be empty.""" - - d, p = splitdrive(p) - # set i to index beyond p's last slash - i = len(p) - while i and p[i-1] not in '/\\': - i = i - 1 - head, tail = p[:i], p[i:] # now tail has no slashes - # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1] in '/\\': - head2 = head2[:-1] - head = head2 or head - return d + head, tail - - -# Split a path in root and extension. -# The extension is everything starting at the last dot in the last -# pathname component; the root is everything before that. -# It is always true that root + ext == p. - -def splitext(p): - return genericpath._splitext(p, sep, altsep, extsep) -splitext.__doc__ = genericpath._splitext.__doc__ - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - -# Is a path a symbolic link? -# This will always return false on systems where posix.lstat doesn't exist. - -def islink(path): - """Test for symbolic link. - On WindowsNT/95 and OS/2 always returns false - """ - return False - -# alias exists to lexists -lexists = exists - -# Is a path a mount point? 
Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Directory tree walk. -# For each directory under top (including top itself, but excluding -# '.' and '..'), func(arg, dirname, filenames) is called, where -# dirname is the name of the directory and filenames is the list -# of files (and subdirectories etc.) in the directory. -# The func may modify the filenames list, to implement a filter, -# or to impose a different order of visiting. - -def walk(top, func, arg): - """Directory tree walk with callback function. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), call func(arg, dirname, fnames). - dirname is the name of the directory, and fnames a list of the names of - the files and subdirectories in dirname (excluding '.' and '..'). func - may modify the fnames list in-place (e.g. via del or slice assignment), - and walk will only recurse into the subdirectories whose names remain in - fnames; this can be used to implement a filter, or to impose a specific - order of visiting. No semantics are defined for, or required of, arg, - beyond that arg is always passed to func. It can be used, e.g., to pass - a filename pattern, or a mutable object designed to accumulate - statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) - try: - names = os.listdir(top) - except os.error: - return - func(arg, top, names) - for name in names: - name = join(top, name) - if isdir(name): - walk(name, func, arg) - - -# Expand paths beginning with '~' or '~user'. -# '~' means $HOME; '~user' means that user's home directory. 
-# If the path doesn't begin with '~', or if the user or $HOME is unknown, -# the path is returned unchanged (leaving error reporting to whatever -# function is called with the expanded path as argument). -# See also module 'glob' for expansion of *, ? and [...] in pathnames. -# (A function should also be defined to do full *sh-style environment -# variable expansion.) - -def expanduser(path): - """Expand ~ and ~user constructs. - - If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': - return path - i, n = 1, len(path) - while i < n and path[i] not in '/\\': - i = i + 1 - - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: - return path - else: - try: - drive = os.environ['HOMEDRIVE'] - except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) - - if i != 1: #~user - userhome = join(dirname(userhome), path[1:i]) - - return userhome + path[i:] - - -# Expand paths containing shell variable substitutions. -# The following rules apply: -# - no expansion within single quotes -# - '$$' is translated into '$' -# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% -# - ${varname} is accepted. -# - $varname is accepted. -# - %varname% is accepted. -# - varnames can be made out of letters, digits and the characters '_-' -# (though is not verified in the ${varname} and %varname% cases) -# XXX With COMMAND.COM you can use any characters in a variable name, -# XXX except '^|<>='. - -def expandvars(path): - """Expand shell variables of the forms $var, ${var} and %var%. 
- - Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: - return path - import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' - index = 0 - pathlen = len(path) - while index < pathlen: - c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] - pathlen = len(path) - try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] - except ValueError: - res = res + path - index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': - res = res + c - index = index + 1 - else: - path = path[index+1:] - pathlen = len(path) - try: - index = path.index('%') - except ValueError: - res = res + '%' + path - index = pathlen - 1 - else: - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': - res = res + c - index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] - pathlen = len(path) - try: - index = path.index('}') - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '${' + var + '}' - except ValueError: - res = res + '${' + path - index = pathlen - 1 - else: - var = '' - index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: - var = var + c - index = index + 1 - c = path[index:index + 1] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '$' + var - if c != '': - index = index - 1 - else: - res = res + c - index = index + 1 - return res - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. -# Previously, this function also truncated pathnames to 8+3 format, -# but as this module is called "ntpath", that's obviously wrong! 
- -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): - # in the case of paths with these prefixes: - # \\.\ -> device names - # \\?\ -> literal paths - # do not do any normalization, but return the path unchanged - return path - path = path.replace("/", "\\") - prefix, path = splitdrive(path) - # We need to be careful here. If the prefix is empty, and the path starts - # with a backslash, it could either be an absolute path on the current - # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It - # is therefore imperative NOT to collapse multiple backslashes blindly in - # that case. - # The code below preserves multiple backslashes when there is no drive - # letter. This means that the invalid filename \\\a\b is preserved - # unchanged, where a\\\b is normalised to a\b. It's not clear that there - # is any better behaviour for such edge cases. - if prefix == '': - # No drive letter - preserve initial backslashes - while path[:1] == "\\": - prefix = prefix + backslash - path = path[1:] - else: - # We have a drive letter - collapse initial backslashes - if path.startswith("\\"): - prefix = prefix + backslash - path = path.lstrip("\\") - comps = path.split("\\") - i = 0 - while i < len(comps): - if comps[i] in ('.', ''): - del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] - i -= 1 - elif i == 0 and prefix.endswith("\\"): - del comps[i] - else: - i += 1 - else: - i += 1 - # If the path is now empty, substitute '.' - if not prefix and not comps: - comps.append(dot) - return prefix + backslash.join(comps) - - -# Return an absolute path. 
-try: - from nt import _getfullpathname - -except ImportError: # not running on Windows - mock up something sensible - def abspath(path): - """Return the absolute version of a path.""" - if not isabs(path): - if isinstance(path, unicode): - cwd = os.getcwdu() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - -else: # use native Windows method on Windows - def abspath(path): - """Return the absolute version of a path.""" - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - elif isinstance(path, unicode): - path = os.getcwdu() - else: - path = os.getcwd() - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath -# Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) - -def _abspath_split(path): - abs = abspath(normpath(path)) - prefix, rest = splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(sep) if x] - -def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. 
- i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -try: - # The genericpath.isdir implementation uses os.stat and checks the mode - # attribute to tell whether or not the path is a directory. - # This is overkill on Windows - just pass the path to GetFileAttributes - # and check the attribute from there. - from nt import _isdir as isdir -except ImportError: - # Use genericpath.isdir as imported above. - pass diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py deleted file mode 100644 index aa1a8c4aba7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.8.2' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py deleted file mode 100644 index 69cc6f4b4f5..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Yet another backport of WeakMethod for Python 2.7. -Changes include removing exception chaining and adding args to super() calls. - -Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. - -Full license available in LICENSE.python. -""" -from weakref import ref - - -class WeakMethod(ref): - """ - A custom `weakref.ref` subclass which simulates a weak reference to - a bound method, working around the lifetime problem of bound methods. 
- """ - - __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" - - def __new__(cls, meth, callback=None): - try: - obj = meth.__self__ - func = meth.__func__ - except AttributeError: - raise TypeError( - "argument should be a bound method, not {}".format(type(meth)) - ) - - def _cb(arg): - # The self-weakref trick is needed to avoid creating a reference - # cycle. - self = self_wr() - if self._alive: - self._alive = False - if callback is not None: - callback(self) - - self = ref.__new__(cls, obj, _cb) - self._func_ref = ref(func, _cb) - self._meth_type = type(meth) - self._alive = True - self_wr = ref(self) - return self - - def __call__(self): - obj = super(WeakMethod, self).__call__() - func = self._func_ref() - if obj is None or func is None: - return None - return self._meth_type(func, obj) - - def __eq__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is other - return ref.__eq__(self, other) and self._func_ref == other._func_ref - return NotImplemented - - def __ne__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is not other - return ref.__ne__(self, other) or self._func_ref != other._func_ref - return NotImplemented - - __hash__ = ref.__hash__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py deleted file mode 100644 index 1aab07ed77a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py deleted file mode 100644 index 6aa9cf0281d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py +++ /dev/null @@ -1,124 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import abc - -import ftrack_api.exception - - -class Accessor(object): - '''Provide data access to a location. - - A location represents a specific storage, but access to that storage may - vary. For example, both local filesystem and FTP access may be possible for - the same storage. An accessor implements these different ways of accessing - the same data location. - - As different accessors may access the same location, only part of a data - path that is commonly understood may be stored in the database. The format - of this path should be a contract between the accessors that require access - to the same location and is left as an implementation detail. As such, this - system provides no guarantee that two different accessors can provide access - to the same location, though this is a clear goal. The path stored centrally - is referred to as the **resource identifier** and should be used when - calling any of the accessor methods that accept a *resource_identifier* - argument. - - ''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise location accessor.''' - super(Accessor, self).__init__() - - @abc.abstractmethod - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. 
- - ''' - - @abc.abstractmethod - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - - @abc.abstractmethod - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - - @abc.abstractmethod - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - - @abc.abstractmethod - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - - @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' - - @abc.abstractmethod - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - - @abc.abstractmethod - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - Should silently ignore existing containers and not recreate them. - - ''' - - @abc.abstractmethod - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` - if container of *resource_identifier* could not be determined. - - ''' - - def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' - return self.remove(resource_identifier) - - def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. 
- - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - filesystem path could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving filesystem paths is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier - ) - - def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py deleted file mode 100644 index 65769603f65..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py +++ /dev/null @@ -1,250 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -import sys -import errno -import contextlib - -import ftrack_api._python_ntpath as ntpath -import ftrack_api.accessor.base -import ftrack_api.data -from ftrack_api.exception import ( - AccessorFilesystemPathError, - AccessorUnsupportedOperationError, - AccessorResourceNotFoundError, - AccessorOperationFailedError, - AccessorPermissionDeniedError, - AccessorResourceInvalidError, - AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError -) - - -class DiskAccessor(ftrack_api.accessor.base.Accessor): - '''Provide disk access to a location. 
- - Expect resource identifiers to refer to relative filesystem paths. - - ''' - - def __init__(self, prefix, **kw): - '''Initialise location accessor. - - *prefix* specifies the base folder for the disk based structure and - will be prepended to any path. It should be specified in the syntax of - the current OS. - - ''' - if prefix: - prefix = os.path.expanduser(os.path.expandvars(prefix)) - prefix = os.path.abspath(prefix) - self.prefix = prefix - - super(DiskAccessor, self).__init__(**kw) - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='list', resource_identifier=resource_identifier - ): - listing = [] - for entry in os.listdir(filesystem_path): - listing.append(os.path.join(resource_identifier, entry)) - - return listing - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.exists(filesystem_path) - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isfile(filesystem_path) - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isdir(filesystem_path) - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise 
AccessorUnsupportedOperationError(operation='is_sequence') - - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='open', resource_identifier=resource_identifier - ): - data = ftrack_api.data.File(filesystem_path, mode) - - return data - - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - if self.is_file(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.remove(filesystem_path) - - elif self.is_container(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.rmdir(filesystem_path) - - else: - raise AccessorResourceNotFoundError( - resource_identifier=resource_identifier - ) - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='makeContainer', resource_identifier=resource_identifier - ): - try: - if recursive: - os.makedirs(filesystem_path) - else: - try: - os.mkdir(filesystem_path) - except OSError as error: - if error.errno == errno.ENOENT: - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier - ) - else: - raise - - except OSError, error: - if error.errno != errno.EEXIST: - raise - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. 
- - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if - container of *resource_identifier* could not be determined. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - container = os.path.dirname(filesystem_path) - - if self.prefix: - if not container.startswith(self.prefix): - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier, - message='Could not determine container for ' - '{resource_identifier} as container falls outside ' - 'of configured prefix.' - ) - - # Convert container filesystem path into resource identifier. - container = container[len(self.prefix):] - if ntpath.isabs(container): - # Ensure that resulting path is relative by stripping any - # leftover prefixed slashes from string. - # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the - # result will be 'foo/bar'. - container = container.lstrip('\\/') - - return container - - def get_filesystem_path(self, resource_identifier): - '''Return filesystem path for *resource_identifier*. - - For example:: - - >>> accessor = DiskAccessor('my.location', '/mountpoint') - >>> print accessor.get_filesystem_path('test.txt') - /mountpoint/test.txt - >>> print accessor.get_filesystem_path('/mountpoint/test.txt') - /mountpoint/test.txt - - Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem - path could not be determined from *resource_identifier*. - - ''' - filesystem_path = resource_identifier - if filesystem_path: - filesystem_path = os.path.normpath(filesystem_path) - - if self.prefix: - if not os.path.isabs(filesystem_path): - filesystem_path = os.path.normpath( - os.path.join(self.prefix, filesystem_path) - ) - - if not filesystem_path.startswith(self.prefix): - raise AccessorFilesystemPathError( - resource_identifier=resource_identifier, - message='Could not determine access path for ' - 'resource_identifier outside of configured prefix: ' - '{resource_identifier}.' 
- ) - - return filesystem_path - - -@contextlib.contextmanager -def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' - try: - yield - - except (OSError, IOError) as error: - (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - - error_code = getattr(error, 'errno') - if not error_code: - raise AccessorOperationFailedError(**kw), None, traceback - - if error_code == errno.ENOENT: - raise AccessorResourceNotFoundError(**kw), None, traceback - - elif error_code == errno.EPERM: - raise AccessorPermissionDeniedError(**kw), None, traceback - - elif error_code == errno.ENOTEMPTY: - raise AccessorContainerNotEmptyError(**kw), None, traceback - - elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): - raise AccessorResourceInvalidError(**kw), None, traceback - - else: - raise AccessorOperationFailedError(**kw), None, traceback - - except Exception: - raise diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py deleted file mode 100644 index 9c735084d5c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py +++ /dev/null @@ -1,240 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import hashlib -import base64 -import json - -import requests - -from .base import Accessor -from ..data import String -import ftrack_api.exception -import ftrack_api.symbol - - -class ServerFile(String): - '''Representation of a server file.''' - - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' - self.mode = mode - self.resource_identifier = resource_identifier - self._session = session - self._has_read = False - - super(ServerFile, self).__init__() - - def flush(self): - 
'''Flush all changes.''' - super(ServerFile, self).flush() - - if self.mode == 'wb': - self._write() - - def read(self, limit=None): - '''Read file.''' - if not self._has_read: - self._read() - self._has_read = True - - return super(ServerFile, self).read(limit) - - def _read(self): - '''Read all remote content from key into wrapped_file.''' - position = self.tell() - self.seek(0) - - response = requests.get( - '{0}/component/get'.format(self._session.server_url), - params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - }, - stream=True - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) - ) - - for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): - self.wrapped_file.write(block) - - self.flush() - self.seek(position) - - def _write(self): - '''Write current data to remote key.''' - position = self.tell() - self.seek(0) - - # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) - if not component: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( - self.resource_identifier - ) - ) - - # Construct a name from component name and file_type. - name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) - - try: - metadata = self._session.get_upload_metadata( - component_id=self.resource_identifier, - file_name=name, - file_size=self._get_size(), - checksum=self._compute_checksum() - ) - except Exception as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) - ) - - # Ensure at beginning of file before put. - self.seek(0) - - # Put the file based on the metadata. 
- response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) - ) - - self.seek(position) - - def _get_size(self): - '''Return size of file in bytes.''' - position = self.tell() - self.seek(0, os.SEEK_END) - length = self.tell() - self.seek(position) - return length - - def _compute_checksum(self): - '''Return checksum for file.''' - fp = self.wrapped_file - buf_size = ftrack_api.symbol.CHUNK_SIZE - hash_obj = hashlib.md5() - spos = fp.tell() - - s = fp.read(buf_size) - while s: - hash_obj.update(s) - s = fp.read(buf_size) - - base64_digest = base64.encodestring(hash_obj.digest()) - if base64_digest[-1] == '\n': - base64_digest = base64_digest[0:-1] - - fp.seek(spos) - return base64_digest - - -class _ServerAccessor(Accessor): - '''Provide server location access.''' - - def __init__(self, session, **kw): - '''Initialise location accessor.''' - super(_ServerAccessor, self).__init__(**kw) - - self._session = session - - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' - return ServerFile(resource_identifier, session=self._session, mode=mode) - - def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' - response = requests.get( - '{0}/component/remove'.format(self._session.server_url), - params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } - ) - if response.status_code != 200: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to remove file.' 
- ) - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' - return None - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' - raise NotImplementedError() - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - return False - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - raise NotImplementedError() - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - raise NotImplementedError() - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise NotImplementedError() - - def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' - url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - return url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - - def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. 
- ''' - url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - url = url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - if size: - url += u'&size={0}'.format(size) - - return url diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py deleted file mode 100644 index 719b612f394..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py +++ /dev/null @@ -1,707 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import collections -import copy -import logging -import functools - -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.collection -import ftrack_api.inspection -import ftrack_api.operation - -logger = logging.getLogger( - __name__ -) - - -def merge_references(function): - '''Decorator to handle merging of references / collections.''' - - @functools.wraps(function) - def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' - - if attribute.name not in entity._inflated: - # Only merge on first access to avoid - # inflating them multiple times. - - logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) - ) - - # Local attributes. 
- local_value = attribute.get_local_value(entity) - if isinstance( - local_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) - - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) - - if merged_local_value is not local_value: - with entity.session.operation_recording(False): - attribute.set_local_value(entity, merged_local_value) - - # Remote attributes. - remote_value = attribute.get_remote_value(entity) - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) - - if merged_remote_value is not remote_value: - attribute.set_remote_value(entity, merged_remote_value) - - entity._inflated.add( - attribute.name - ) - - return function( - attribute, entity - ) - - return get_value - - -class Attributes(object): - '''Collection of properties accessible by name.''' - - def __init__(self, attributes=None): - super(Attributes, self).__init__() - self._data = dict() - if attributes is not None: - for attribute in attributes: - self.add(attribute) - - def add(self, attribute): - '''Add *attribute*.''' - existing = self._data.get(attribute.name, None) - if existing: - raise ftrack_api.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) - ) - - self._data[attribute.name] = attribute - - def remove(self, attribute): - '''Remove attribute.''' - self._data.pop(attribute.name) - - def get(self, name): - '''Return attribute by *name*. - - If no attribute matches *name* then return None. 
- - ''' - return self._data.get(name, None) - - def keys(self): - '''Return list of attribute names.''' - return self._data.keys() - - def __contains__(self, item): - '''Return whether *item* present.''' - if not isinstance(item, Attribute): - return False - - return item.name in self._data - - def __iter__(self): - '''Return iterator over attributes.''' - return self._data.itervalues() - - def __len__(self): - '''Return count of attributes.''' - return len(self._data) - - -class Attribute(object): - '''A name and value pair persisted remotely.''' - - def __init__( - self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, - computed=False - ): - '''Initialise attribute with *name*. - - *default_value* represents the default value for the attribute. It may - be a callable. It is not used within the attribute when providing - values, but instead exists for other parts of the system to reference. - - If *mutable* is set to False then the local value of the attribute on an - entity can only be set when both the existing local and remote values - are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the - target value is also :attr:`ftrack_api.symbol.NOT_SET`. - - If *computed* is set to True the value is a remote side computed value - and should not be long-term cached. 
- - ''' - super(Attribute, self).__init__() - self._name = name - self._mutable = mutable - self._computed = computed - self.default_value = default_value - - self._local_key = 'local' - self._remote_key = 'remote' - - def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) - ) - - def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' - storage = getattr(entity, storage_key, None) - if storage is None: - storage = collections.defaultdict( - lambda: - { - self._local_key: ftrack_api.symbol.NOT_SET, - self._remote_key: ftrack_api.symbol.NOT_SET - } - ) - setattr(entity, storage_key, storage) - - return storage - - @property - def name(self): - '''Return name.''' - return self._name - - @property - def mutable(self): - '''Return whether attribute is mutable.''' - return self._mutable - - @property - def computed(self): - '''Return whether attribute is computed.''' - return self._computed - - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - ''' - value = self.get_local_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - value = self.get_remote_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - if not entity.session.auto_populate: - return value - - self.populate_remote_value(entity) - return self.get_remote_value(entity) - - def get_local_value(self, entity): - '''Return locally set value for *entity*.''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._local_key] - - def get_remote_value(self, entity): - '''Return remote value for *entity*. - - .. 
note:: - - Only return locally stored remote value, do not fetch from remote. - - ''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._remote_key] - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if ( - not self.mutable - and self.is_set(entity) - and value is not ftrack_api.symbol.NOT_SET - ): - raise ftrack_api.exception.ImmutableAttributeError(self) - - old_value = self.get_local_value(entity) - - storage = self.get_entity_storage(entity) - storage[self.name][self._local_key] = value - - # Record operation. - if entity.session.record_operations: - entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity), - self.name, - old_value, - value - ) - ) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - storage = self.get_entity_storage(entity) - storage[self.name][self._remote_key] = value - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' - entity.session.populate([entity], self.name) - - def is_modified(self, entity): - '''Return whether local value set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - return ( - local_value is not ftrack_api.symbol.NOT_SET - and local_value != remote_value - ) - - def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET - ]) - - -class ScalarAttribute(Attribute): - '''Represent a scalar value.''' - - def __init__(self, name, data_type, **kw): - '''Initialise property.''' - super(ScalarAttribute, self).__init__(name, **kw) - self.data_type = data_type - - -class ReferenceAttribute(Attribute): - '''Reference another entity.''' - - def __init__(self, name, entity_type, **kw): - '''Initialise property.''' - super(ReferenceAttribute, self).__init__(name, **kw) - self.entity_type = entity_type - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*. - - As attribute references another entity, use that entity's configured - default projections to auto populate useful attributes when loading. - - ''' - reference_entity_type = entity.session.types[self.entity_type] - default_projections = reference_entity_type.default_projections - - projections = [] - if default_projections: - for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) - else: - projections.append(self.name) - - entity.session.populate([entity], ', '.join(projections)) - - def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - - if local_value is ftrack_api.symbol.NOT_SET: - return False - - if remote_value is ftrack_api.symbol.NOT_SET: - return True - - if ( - ftrack_api.inspection.identity(local_value) - != ftrack_api.inspection.identity(remote_value) - ): - return True - - return False - - - @merge_references - def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) - -class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' - - #: Collection class used by attribute. - collection_class = None - - @merge_references - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - .. note:: - - As value is a collection that is mutable, will transfer a remote - value into the local value on access if no local value currently - set. - - ''' - super(AbstractCollectionAttribute, self).get_value(entity) - - # Conditionally, copy remote value into local value so that it can be - # mutated without side effects. - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) - ): - try: - with entity.session.operation_recording(False): - self.set_local_value(entity, copy.copy(remote_value)) - except ftrack_api.exception.ImmutableAttributeError: - pass - - value = self.get_local_value(entity) - - # If the local value is still not set then attempt to set it with a - # suitable placeholder collection so that the caller can interact with - # the collection using its normal interface. This is required for a - # newly created entity for example. 
It *could* be done as a simple - # default value, but that would incur cost for every collection even - # when they are not modified before commit. - if value is ftrack_api.symbol.NOT_SET: - try: - with entity.session.operation_recording(False): - self.set_local_value( - entity, - # None should be treated as empty collection. - None - ) - except ftrack_api.exception.ImmutableAttributeError: - pass - - return self.get_local_value(entity) - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = self.mutable - - super(AbstractCollectionAttribute, self).set_local_value(entity, value) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = False - - super(AbstractCollectionAttribute, self).set_remote_value(entity, value) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. - - .. note:: - - If *value* is None then return a suitable empty collection. - - ''' - raise NotImplementedError() - - -class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' - - #: Collection class used by attribute. 
- collection_class = ftrack_api.collection.Collection - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' - - if not isinstance(value, ftrack_api.collection.Collection): - - if value is None: - value = ftrack_api.collection.Collection(entity, self) - - elif isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute' - ) - - return value - - -class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy - - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. - - *creator* should be a function that accepts a dictionary of data and - is used by the referenced collection to create new entities in the - collection. - - *key_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'key' of the dictionary. - - *value_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'value' of the dictionary. 
- - ''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - - super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.KeyValueMappedCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - ftrack_api.collection.Collection(entity, self), - self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - value, self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.KeyValueMappedCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a KeyValueMappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). 
- collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.KeyValueMappedCollectionProxy( - collection, self.creator, - self.key_attribute, self.value_attribute - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value - - -class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped custom attribute collection of entities.''' - - #: Collection class used by attribute. - collection_class = ( - ftrack_api.collection.CustomAttributeCollectionProxy - ) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.CustomAttributeCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.CustomAttributeCollectionProxy( - ftrack_api.collection.Collection(entity, self) - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - # Why are we creating a new if it is a list? This will cause - # any merge to create a new proxy and collection. - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.CustomAttributeCollectionProxy( - value - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. 
This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.CustomAttributeCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.CustomAttributeCollectionProxy( - collection - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py deleted file mode 100644 index 49456dc2d79..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py +++ /dev/null @@ -1,579 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''Caching framework. 
- -Defines a standardised :class:`Cache` interface for storing data against -specific keys. Key generation is also standardised using a :class:`KeyMaker` -interface. - -Combining a Cache and KeyMaker allows for memoisation of function calls with -respect to the arguments used by using a :class:`Memoiser`. - -As a convenience a simple :func:`memoise` decorator is included for quick -memoisation of function using a global cache and standard key maker. - -''' - -import collections -import functools -import abc -import copy -import inspect -import re -import anydbm -import contextlib -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -import ftrack_api.inspection -import ftrack_api.symbol - - -class Cache(object): - '''Cache interface. - - Derive from this to define concrete cache implementations. A cache is - centered around the concept of key:value pairings where the key is unique - across the cache. - - ''' - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - @abc.abstractmethod - def set(self, key, value): - '''Set *value* for *key*.''' - - @abc.abstractmethod - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - raise NotImplementedError() # pragma: no cover - - def values(self): - '''Return values for current keys.''' - values = [] - for key in self.keys(): - try: - value = self.get(key) - except KeyError: - continue - else: - values.append(value) - - return values - - def clear(self, pattern=None): - '''Remove all keys matching *pattern*. - - *pattern* should be a regular expression string. - - If *pattern* is None then all keys will be removed. 
- - ''' - if pattern is not None: - pattern = re.compile(pattern) - - for key in self.keys(): - if pattern is not None: - if not pattern.search(key): - continue - - try: - self.remove(key) - except KeyError: - pass - - -class ProxyCache(Cache): - '''Proxy another cache.''' - - def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' - self.proxied = proxied - super(ProxyCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.get(key) - - def set(self, key, value): - '''Set *value* for *key*.''' - return self.proxied.set(key, value) - - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.remove(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self.proxied.keys() - - -class LayeredCache(Cache): - '''Layered cache.''' - - def __init__(self, caches): - '''Initialise cache with *caches*.''' - super(LayeredCache, self).__init__() - self.caches = caches - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - Attempt to retrieve from cache layers in turn, starting with shallowest. - If value retrieved, then also set the value in each higher level cache - up from where retrieved. - - ''' - target_caches = [] - value = ftrack_api.symbol.NOT_SET - - for cache in self.caches: - try: - value = cache.get(key) - except KeyError: - target_caches.append(cache) - continue - else: - break - - if value is ftrack_api.symbol.NOT_SET: - raise KeyError(key) - - # Set value on all higher level caches. 
- for cache in target_caches: - cache.set(key, value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - for cache in self.caches: - cache.set(key, value) - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found in any layer. - - ''' - removed = False - for cache in self.caches: - try: - cache.remove(key) - except KeyError: - pass - else: - removed = True - - if not removed: - raise KeyError(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - keys = [] - for cache in self.caches: - keys.extend(cache.keys()) - - return list(set(keys)) - - -class MemoryCache(Cache): - '''Memory based cache.''' - - def __init__(self): - '''Initialise cache.''' - self._cache = {} - super(MemoryCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self._cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - self._cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - del self._cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self._cache.keys() - - -class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. - - .. note:: - - No locking of the underlying file is performed. - - ''' - - def __init__(self, path): - '''Initialise cache at *path*.''' - self.path = path - - # Initialise cache. 
- cache = anydbm.open(self.path, 'c') - cache.close() - - super(FileCache, self).__init__() - - @contextlib.contextmanager - def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') - try: - yield cache - finally: - cache.close() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - return cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - with self._database() as cache: - cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - del cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - with self._database() as cache: - return cache.keys() - - -class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' - - def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. - - *proxied* is the underlying cache to use for storage. - - ''' - self.encode = encode - self.decode = decode - super(SerialisedCache, self).__init__(proxied) - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
- - ''' - value = super(SerialisedCache, self).get(key) - if self.decode: - value = self.decode(value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - if self.encode: - value = self.encode(value) - - super(SerialisedCache, self).set(key, value) - - -class KeyMaker(object): - '''Generate unique keys.''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise key maker.''' - super(KeyMaker, self).__init__() - self.item_separator = '' - - def key(self, *items): - '''Return key for *items*.''' - keys = [] - for item in items: - keys.append(self._key(item)) - - return self.item_separator.join(keys) - - @abc.abstractmethod - def _key(self, obj): - '''Return key for *obj*.''' - - -class StringKeyMaker(KeyMaker): - '''Generate string key.''' - - def _key(self, obj): - '''Return key for *obj*.''' - return str(obj) - - -class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' - - def __init__(self): - '''Initialise key maker.''' - super(ObjectKeyMaker, self).__init__() - self.item_separator = '\0' - self.mapping_identifier = '\1' - self.mapping_pair_separator = '\2' - self.iterable_identifier = '\3' - self.name_identifier = '\4' - - def _key(self, item): - '''Return key for *item*. - - Returned key will be a pickle like string representing the *item*. This - allows for typically non-hashable objects to be used in key generation - (such as dictionaries). - - If *item* is iterable then each item in it shall also be passed to this - method to ensure correct key generation. - - Special markers are used to distinguish handling of specific cases in - order to ensure uniqueness of key corresponds directly to *item*. - - Example:: - - >>> key_maker = ObjectKeyMaker() - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... return x + y - ... 
- >>> key_maker.key(add, (1, 2)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' - >>> key_maker.key(add, (1, 3)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - - ''' - # TODO: Consider using a more robust and comprehensive solution such as - # dill (https://github.com/uqfoundation/dill). - if isinstance(item, collections.Iterable): - if isinstance(item, basestring): - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - if isinstance(item, collections.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - return ( - self.mapping_identifier + - contents + - self.mapping_identifier - ) - - else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) - - elif inspect.ismethod(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.im_class.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isfunction(item) or inspect.isclass(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isbuiltin(item): - return self.name_identifier + item.__name__ - - else: - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - -class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. - - Example:: - - >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... print 'Called' - ... return x + y - ... - >>> memoiser.call(add, (1, 2), {}) - Called - >>> memoiser.call(add, (1, 2), {}) - >>> memoiser.call(add, (1, 3), {}) - Called - - ''' - - def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. 
- - If *cache* is not specified a default :class:`MemoryCache` will be - used. Similarly, if *key_maker* is not specified a default - :class:`ObjectKeyMaker` will be used. - - If *return_copies* is True then all results returned from the cache will - be deep copies to avoid indirect mutation of cached values. - - ''' - self.cache = cache - if self.cache is None: - self.cache = MemoryCache() - - self.key_maker = key_maker - if self.key_maker is None: - self.key_maker = ObjectKeyMaker() - - self.return_copies = return_copies - super(Memoiser, self).__init__() - - def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. - - If *function* was previously called with exactly the same arguments - then return cached result if available. - - Store result for call in cache. - - ''' - if args is None: - args = () - - if kw is None: - kw = {} - - # Support arguments being passed as positionals or keywords. - arguments = inspect.getcallargs(function, *args, **kw) - - key = self.key_maker.key(function, arguments) - try: - value = self.cache.get(key) - - except KeyError: - value = function(*args, **kw) - self.cache.set(key, value) - - # If requested, deep copy value to return in order to avoid cached value - # being inadvertently altered by the caller. - if self.return_copies: - value = copy.deepcopy(value) - - return value - - -def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): - - @functools.wraps(function) - def inner(*args, **kw): - return memoiser.call(function, args, kw) - - return inner - - return outer - - -#: Default memoiser. -memoiser = Memoiser() - -#: Default memoise decorator using standard cache and key maker. 
-memoise = memoise_decorator(memoiser) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py deleted file mode 100644 index 91655a7b022..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py +++ /dev/null @@ -1,507 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging - -import collections -import copy - -import ftrack_api.exception -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.operation -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Collection(collections.MutableSequence): - '''A collection of entities.''' - - def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' - self.entity = entity - self.attribute = attribute - self._data = [] - self._identities = set() - - # Set initial dataset. - # Note: For initialisation, immutability is deferred till after initial - # population as otherwise there would be no public way to initialise an - # immutable collection. The reason self._data is not just set directly - # is to ensure other logic can be applied without special handling. - self.mutable = True - try: - if data is None: - data = [] - - with self.entity.session.operation_recording(False): - self.extend(data) - finally: - self.mutable = mutable - - def _identity_key(self, entity): - '''Return identity key for *entity*.''' - return str(ftrack_api.inspection.identity(entity)) - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying data store. 
- - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance._data = copy.copy(self._data) - copied_instance._identities = copy.copy(self._identities) - - return copied_instance - - def _notify(self, old_value): - '''Notify about modification.''' - # Record operation. - if self.entity.session.record_operations: - self.entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - self.entity.entity_type, - ftrack_api.inspection.primary_key(self.entity), - self.attribute.name, - old_value, - self - ) - ) - - def insert(self, index, item): - '''Insert *item* at *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - if item in self: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - self._data.insert(index, item) - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __contains__(self, value): - '''Return whether *value* present in collection.''' - return self._identity_key(value) in self._identities - - def __getitem__(self, index): - '''Return item at *index*.''' - return self._data[index] - - def __setitem__(self, index, item): - '''Set *item* against *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - try: - existing_index = self.index(item) - except ValueError: - pass - else: - if index != existing_index: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - try: - existing_item = self._data[index] - except IndexError: - pass - else: - self._identities.remove(self._identity_key(existing_item)) - - self._data[index] = item - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __delitem__(self, index): - '''Remove item at *index*.''' - if not self.mutable: - raise 
ftrack_api.exception.ImmutableCollectionError(self) - - old_value = copy.copy(self) - item = self._data[index] - del self._data[index] - self._identities.remove(self._identity_key(item)) - self._notify(old_value) - - def __len__(self): - '''Return count of items.''' - return len(self._data) - - def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' - if not isinstance(other, Collection): - return False - - return sorted(self._identities) == sorted(other._identities) - - def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' - return not self == other - - -class MappedCollectionProxy(collections.MutableMapping): - '''Common base class for mapped collection of entities.''' - - def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.collection = collection - super(MappedCollectionProxy, self).__init__() - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying collection. - - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance.collection = copy.copy(self.collection) - - return copied_instance - - @property - def mutable(self): - '''Return whether collection is mutable.''' - return self.collection.mutable - - @mutable.setter - def mutable(self, value): - '''Set whether collection is mutable to *value*.''' - self.collection.mutable = value - - @property - def attribute(self): - '''Return attribute bound to.''' - return self.collection.attribute - - @attribute.setter - def attribute(self, value): - '''Set bound attribute to *value*.''' - self.collection.attribute = value - - -class KeyValueMappedCollectionProxy(MappedCollectionProxy): - '''A mapped collection of key, value entities. 
- - Proxy a standard :class:`Collection` as a mapping where certain attributes - from the entities in the collection are mapped to key, value pairs. - - For example:: - - >>> collection = [Metadata(key='foo', value='bar'), ...] - >>> mapped = KeyValueMappedCollectionProxy( - ... collection, create_metadata, - ... key_attribute='key', value_attribute='value' - ... ) - >>> print mapped['foo'] - 'bar' - >>> mapped['bam'] = 'biz' - >>> print mapped.collection[-1] - Metadata(key='bam', value='biz') - - ''' - - def __init__( - self, collection, creator, key_attribute, value_attribute - ): - '''Initialise collection.''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - super(KeyValueMappedCollectionProxy, self).__init__(collection) - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - for entity in self.collection: - if entity[self.key_attribute] == key: - return entity - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - return entity[self.value_attribute] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - try: - entity = self._get_entity_by_key(key) - except KeyError: - data = { - self.key_attribute: key, - self.value_attribute: value - } - entity = self.creator(self, data) - - if ( - ftrack_api.inspection.state(entity) is - ftrack_api.symbol.CREATED - ): - # Persisting this entity will be handled here, record the - # operation. - self.collection.append(entity) - - else: - # The entity is created and persisted separately by the - # creator. Do not record this operation. - with self.collection.entity.session.operation_recording(False): - # Do not record this operation since it will trigger - # redudant and potentially failing operations. 
- self.collection.append(entity) - - else: - entity[self.value_attribute] = value - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. - - ''' - for index, entity in enumerate(self.collection): - if entity[self.key_attribute] == key: - break - else: - raise KeyError(key) - - del self.collection[index] - entity.session.delete(entity) - - def __iter__(self): - '''Iterate over all keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return len(keys) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for session.''' - - def _key(self, obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - session = obj.get('session') - if session is not None: - # Key by session only. - return str(id(session)) - - return str(obj) - - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. 
- - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type from ' - 'CustomAttributeConfiguration' - ).all() - - -class CustomAttributeCollectionProxy(MappedCollectionProxy): - '''A mapped collection of custom attribute value entities.''' - - def __init__( - self, collection - ): - '''Initialise collection.''' - self.key_attribute = 'configuration_id' - self.value_attribute = 'value' - super(CustomAttributeCollectionProxy, self).__init__(collection) - - def _get_entity_configurations(self): - '''Return all configurations for current collection entity.''' - entity = self.collection.entity - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Asset': - project_id = entity['parent']['project_id'] - entity_type = 'asset' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. 
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - def _get_keys(self): - '''Return a list of all keys.''' - keys = [] - for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) - - return keys - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - configuration_id = self.get_configuration_id_from_key(key) - for entity in self.collection: - if entity[self.key_attribute] == configuration_id: - return entity - - return None - - def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. - - Raise :exc:`KeyError` if no configuration with matching *key* found. - - ''' - for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - - if entity: - return entity[self.value_attribute] - - for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] - - raise KeyError(key) - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - custom_attribute_value[self.value_attribute] = value - else: - entity = self.collection.entity - session = entity.session - data = { - self.key_attribute: self.get_configuration_id_from_key(key), - self.value_attribute: value, - 'entity_id': entity['id'] - } - - # Make sure to use the currently active collection. This is - # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) - ) - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
- - ''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - index = self.collection.index(custom_attribute_value) - del self.collection[index] - - custom_attribute_value.session.delete(custom_attribute_value) - else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) - - def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' - if collection is ftrack_api.symbol.NOT_SET: - return False - - return collection.collection == self.collection - - def __iter__(self): - '''Iterate over all keys.''' - keys = self._get_keys() - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = self._get_keys() - return len(keys) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py deleted file mode 100644 index 1802e380c05..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py +++ /dev/null @@ -1,119 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -from abc import ABCMeta, abstractmethod -import tempfile - - -class Data(object): - '''File-like object for manipulating data.''' - - __metaclass__ = ABCMeta - - def __init__(self): - '''Initialise data access.''' - self.closed = False - - @abstractmethod - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - @abstractmethod - def write(self, content): - '''Write content at current position.''' - - def flush(self): - '''Flush buffers ensuring data written.''' - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. 
- - The *whence* argument is optional and defaults to os.SEEK_SET or 0 - (absolute file positioning); other values are os.SEEK_CUR or 1 - (seek relative to the current position) and os.SEEK_END or 2 - (seek relative to the file's end). - - ''' - raise NotImplementedError('Seek not supported.') - - def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') - - def close(self): - '''Flush buffers and prevent further access.''' - self.flush() - self.closed = True - - -class FileWrapper(Data): - '''Data wrapper for Python file objects.''' - - def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' - self.wrapped_file = wrapped_file - self._read_since_last_write = False - super(FileWrapper, self).__init__() - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - self._read_since_last_write = True - - if limit is None: - limit = -1 - - return self.wrapped_file.read(limit) - - def write(self, content): - '''Write content at current position.''' - if self._read_since_last_write: - # Windows requires a seek before switching from read to write. 
- self.seek(self.tell()) - - self.wrapped_file.write(content) - self._read_since_last_write = False - - def flush(self): - '''Flush buffers ensuring data written.''' - super(FileWrapper, self).flush() - if hasattr(self.wrapped_file, 'flush'): - self.wrapped_file.flush() - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*.''' - self.wrapped_file.seek(offset, whence) - - def tell(self): - '''Return current position of internal pointer.''' - return self.wrapped_file.tell() - - def close(self): - '''Flush buffers and prevent further access.''' - if not self.closed: - super(FileWrapper, self).close() - if hasattr(self.wrapped_file, 'close'): - self.wrapped_file.close() - - -class File(FileWrapper): - '''Data wrapper accepting filepath.''' - - def __init__(self, path, mode='rb'): - '''Open file at *path* with *mode*.''' - file_object = open(path, mode) - super(File, self).__init__(file_object) - - -class String(FileWrapper): - '''Data wrapper using TemporaryFile instance.''' - - def __init__(self, content=None): - '''Initialise data with *content*.''' - super(String, self).__init__( - tempfile.TemporaryFile() - ) - - if content is not None: - self.wrapped_file.write(content) - self.wrapped_file.seek(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py deleted file mode 100644 index 1d452f2828f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py deleted file mode 100644 index 859d94e4360..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class AssetVersion(ftrack_api.entity.base.Entity): - '''Represent asset version.''' - - def create_component( - self, path, data=None, location=None - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). This version is - automatically set as the component's version. - - If *location* is specified then automatically add component to that - location. - - ''' - if data is None: - data = {} - - data.pop('version_id', None) - data['version'] = self - - return self.session.create_component(path, data=data, location=location) - - def encode_media(self, media, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible, and will be - set as the version's thumbnail. - - The new components will automatically be associated with the version. - A server version of 3.3.32 or higher is required for this to function - properly. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. 
- ''' - return self.session.encode_media( - media, version_id=self['id'], keep_original=keep_original - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py deleted file mode 100644 index f5a1a3cec35..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py +++ /dev/null @@ -1,402 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import abc -import collections -import logging - -import ftrack_api.symbol -import ftrack_api.attribute -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation -from ftrack_api.logging import LazyLogMessage as L - - -class DynamicEntityTypeMetaclass(abc.ABCMeta): - '''Custom metaclass to customise representation of dynamic classes. - - .. note:: - - Derive from same metaclass as derived bases to avoid conflicts. - - ''' - def __repr__(self): - '''Return representation of class.''' - return ''.format(self.__name__) - - -class Entity(collections.MutableMapping): - '''Base class for all entities.''' - - __metaclass__ = DynamicEntityTypeMetaclass - - entity_type = 'Entity' - attributes = None - primary_key_attributes = None - default_projections = None - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
- - ''' - super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.session = session - self._inflated = set() - - if data is None: - data = {} - - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) - - self._ignore_data_keys = ['__entity_type__'] - if not reconstructing: - self._construct(data) - else: - self._reconstruct(data) - - def _construct(self, data): - '''Construct from *data*.''' - # Suspend operation recording so that all modifications can be applied - # in single create operation. In addition, recording a modification - # operation requires a primary key which may not be available yet. - - relational_attributes = dict() - - with self.session.operation_recording(False): - # Set defaults for any unset local attributes. - for attribute in self.__class__.attributes: - if attribute.name not in data: - default_value = attribute.default_value - if callable(default_value): - default_value = default_value(self) - - attribute.set_local_value(self, default_value) - - - # Data represents locally set values. - for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) - continue - - if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) - - else: - attribute.set_local_value(self, value) - - # Record create operation. - # Note: As this operation is recorded *before* any Session.merge takes - # place there is the possibility that the operation will hold references - # to outdated data in entity_data. However, this would be unusual in - # that it would mean the same new entity was created twice and only one - # altered. 
Conversely, if this operation were recorded *after* - # Session.merge took place, any cache would not be able to determine - # the status of the entity, which could be important if the cache should - # not store newly created entities that have not yet been persisted. Out - # of these two 'evils' this approach is deemed the lesser at this time. - # A third, more involved, approach to satisfy both might be to record - # the operation with a PENDING entity_data value and then update with - # merged values post merge. - if self.session.record_operations: - entity_data = {} - - # Lower level API used here to avoid including any empty - # collections that are automatically generated on access. - for attribute in self.attributes: - value = attribute.get_local_value(self) - if value is not ftrack_api.symbol.NOT_SET: - entity_data[attribute.name] = value - - self.session.recorded_operations.push( - ftrack_api.operation.CreateEntityOperation( - self.entity_type, - ftrack_api.inspection.primary_key(self), - entity_data - ) - ) - - for attribute, value in relational_attributes.items(): - # Finally we set values for "relational" attributes, we need - # to do this at the end in order to get the create operations - # in the correct order as the newly created attributes might - # contain references to the newly created entity. - - attribute.set_local_value( - self, value - ) - - def _reconstruct(self, data): - '''Reconstruct from *data*.''' - # Data represents remote values. 
- for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such attribute ' - 'found on entity {1!r}.', key, self - )) - continue - - attribute.set_remote_value(self, value) - - def __repr__(self): - '''Return representation of instance.''' - return ''.format( - self.__class__.__name__, id(self) - ) - - def __str__(self): - '''Return string representation of instance.''' - with self.session.auto_populating(False): - primary_key = ['Unknown'] - try: - primary_key = ftrack_api.inspection.primary_key(self).values() - except KeyError: - pass - - return '<{0}({1})>'.format( - self.__class__.__name__, ', '.join(primary_key) - ) - - def __hash__(self): - '''Return hash representing instance.''' - return hash(str(ftrack_api.inspection.identity(self))) - - def __eq__(self, other): - '''Return whether *other* is equal to this instance. - - .. note:: - - Equality is determined by both instances having the same identity. - Values of attributes are not considered. - - ''' - try: - return ( - ftrack_api.inspection.identity(other) - == ftrack_api.inspection.identity(self) - ) - except (AttributeError, KeyError): - return False - - def __getitem__(self, key): - '''Return attribute value for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - return attribute.get_value(self) - - def __setitem__(self, key, value): - '''Set attribute *value* for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - attribute.set_local_value(self, value) - - def __delitem__(self, key): - '''Clear attribute value for *key*. - - .. note:: - - Will not remove the attribute, but instead clear any local value - and revert to the last known server value. 
- - ''' - attribute = self.__class__.attributes.get(key) - attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) - - def __iter__(self): - '''Iterate over all attributes keys.''' - for attribute in self.__class__.attributes: - yield attribute.name - - def __len__(self): - '''Return count of attributes.''' - return len(self.__class__.attributes) - - def values(self): - '''Return list of values.''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).values() - - def items(self): - '''Return list of tuples of (key, value) pairs. - - .. note:: - - Will fetch all values from the server if not already fetched or set - locally. - - ''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).items() - - def clear(self): - '''Reset all locally modified attribute values.''' - for attribute in self: - del self[attribute] - - def merge(self, entity, merged=None): - '''Merge *entity* attribute values and other data into this entity. - - Only merge values from *entity* that are not - :attr:`ftrack_api.symbol.NOT_SET`. - - Return a list of changes made with each change being a mapping with - the keys: - - * type - Either 'remote_attribute', 'local_attribute' or 'property'. - * name - The name of the attribute / property modified. - * old_value - The previous value. - * new_value - The new merged value. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' - changes = [] - - # Attributes. - - # Prioritise by type so that scalar values are set first. This should - # guarantee that the attributes making up the identity of the entity - # are merged before merging any collections that may have references to - # this entity. 
- attributes = collections.deque() - for attribute in entity.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - attributes.appendleft(attribute) - else: - attributes.append(attribute) - - for other_attribute in attributes: - attribute = self.attributes.get(other_attribute.name) - - # Local attributes. - other_local_value = other_attribute.get_local_value(entity) - if other_local_value is not ftrack_api.symbol.NOT_SET: - local_value = attribute.get_local_value(self) - if local_value != other_local_value: - merged_local_value = self.session.merge( - other_local_value, merged=merged - ) - - attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # Remote attributes. - other_remote_value = other_attribute.get_remote_value(entity) - if other_remote_value is not ftrack_api.symbol.NOT_SET: - remote_value = attribute.get_remote_value(self) - if remote_value != other_remote_value: - merged_remote_value = self.session.merge( - other_remote_value, merged=merged - ) - - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # We need to handle collections separately since - # they may store a local copy of the remote attribute - # even though it may not be modified. - if not isinstance( - attribute, ftrack_api.attribute.AbstractCollectionAttribute - ): - continue - - local_value = attribute.get_local_value( - self - ) - - # Populated but not modified, update it. 
- if ( - local_value is not ftrack_api.symbol.NOT_SET and - local_value == remote_value - ): - attribute.set_local_value( - self, merged_remote_value - ) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - return changes - - def _populate_unset_scalar_attributes(self): - '''Populate all unset scalar attributes in one query.''' - projections = [] - for attribute in self.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET: - projections.append(attribute.name) - - if projections: - self.session.populate([self], ', '.join(projections)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py deleted file mode 100644 index 9d59c4c051d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Component(ftrack_api.entity.base.Entity): - '''Represent a component.''' - - def get_availability(self, locations=None): - '''Return availability in *locations*. - - If *locations* is None, all known locations will be checked. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.session.get_component_availability( - self, locations=locations - ) - - -class CreateThumbnailMixin(object): - '''Mixin to add create_thumbnail method on entity class.''' - - def create_thumbnail(self, path, data=None): - '''Set entity thumbnail from *path*. 
- - Creates a thumbnail component using in the ftrack.server location - :meth:`Session.create_component - ` The thumbnail component - will be created using *data* if specified. If no component name is - given, `thumbnail` will be used. - - The file is expected to be of an appropriate size and valid file - type. - - .. note:: - - A :meth:`Session.commit` will be - automatically issued. - - ''' - if data is None: - data = {} - if not data.get('name'): - data['name'] = 'thumbnail' - - thumbnail_component = self.session.create_component( - path, data, location=None - ) - - origin_location = self.session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - server_location = self.session.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - server_location.add_component(thumbnail_component, [origin_location]) - - # TODO: This commit can be avoided by reordering the operations in - # this method so that the component is transferred to ftrack.server - # after the thumbnail has been set. - # - # There is currently a bug in the API backend, causing the operations - # to *some* times be ordered wrongly, where the update occurs before - # the component has been created, causing an integrity error. - # - # Once this issue has been resolved, this commit can be removed and - # and the update placed between component creation and registration. 
- self['thumbnail_id'] = thumbnail_component['id'] - self.session.commit() - - return thumbnail_component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py deleted file mode 100644 index e925b70f5a6..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py +++ /dev/null @@ -1,435 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import uuid -import functools - -import ftrack_api.attribute -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.entity.component -import ftrack_api.entity.asset_version -import ftrack_api.entity.project_schema -import ftrack_api.entity.note -import ftrack_api.entity.job -import ftrack_api.entity.user -import ftrack_api.symbol -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Factory(object): - '''Entity class factory.''' - - def __init__(self): - '''Initialise factory.''' - super(Factory, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*. - - *bases* should be a list of bases to give the constructed class. If not - specified, default to :class:`ftrack_api.entity.base.Entity`. - - ''' - entity_type = schema['id'] - class_name = entity_type - - class_bases = bases - if class_bases is None: - class_bases = [ftrack_api.entity.base.Entity] - - class_namespace = dict() - - # Build attributes for class. 
- attributes = ftrack_api.attribute.Attributes() - immutable_properties = schema.get('immutable', []) - computed_properties = schema.get('computed', []) - for name, fragment in schema.get('properties', {}).items(): - mutable = name not in immutable_properties - computed = name in computed_properties - - default = fragment.get('default', ftrack_api.symbol.NOT_SET) - if default == '{uid}': - default = lambda instance: str(uuid.uuid4()) - - data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) - - if data_type is not ftrack_api.symbol.NOT_SET: - - if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable', - 'object' - ): - # Basic scalar attribute. - if data_type == 'number': - data_type = 'float' - - if data_type == 'string': - data_format = fragment.get('format') - if data_format == 'date-time': - data_type = 'datetime' - - attribute = self.create_scalar_attribute( - class_name, name, mutable, computed, default, data_type - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'array': - attribute = self.create_collection_attribute( - class_name, name, mutable - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'mapped_array': - reference = fragment.get('items', {}).get('$ref') - if not reference: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_mapped_collection_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - else: - self.logger.debug(L( - 'Skipping {0}.{1} attribute with unrecognised data ' - 'type {2}', class_name, name, data_type - )) - else: - # Reference attribute. 
- reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) - if reference is ftrack_api.symbol.NOT_SET: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_reference_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - default_projections = schema.get('default_projections', []) - - # Construct class. - class_namespace['entity_type'] = entity_type - class_namespace['attributes'] = attributes - class_namespace['primary_key_attributes'] = schema['primary_key'][:] - class_namespace['default_projections'] = default_projections - - cls = type( - str(class_name), # type doesn't accept unicode. - tuple(class_bases), - class_namespace - ) - - return cls - - def create_scalar_attribute( - self, class_name, name, mutable, computed, default, data_type - ): - '''Return appropriate scalar attribute instance.''' - return ftrack_api.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable, - computed=computed - ) - - def create_reference_attribute(self, class_name, name, mutable, reference): - '''Return appropriate reference attribute instance.''' - return ftrack_api.attribute.ReferenceAttribute( - name, reference, mutable=mutable - ) - - def create_collection_attribute(self, class_name, name, mutable): - '''Return appropriate collection attribute instance.''' - return ftrack_api.attribute.CollectionAttribute( - name, mutable=mutable - ) - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has ' - 'no implementation defined for reference {2}.', - class_name, name, reference - )) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for defaults.''' - - def _key(self, 
obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - entity = obj.get('entity') - if entity is not None: - # Key by session only. - return str(id(entity.session)) - - return str(obj) - - -#: Memoiser for use with default callables that should only be called once per -# session. -memoise_defaults = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. - - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type, ' - 'is_hierarchical from CustomAttributeConfiguration' - ).all() - - -def _get_entity_configurations(entity): - '''Return all configurations for current collection entity.''' - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity.entity_type == 'Asset': - entity_type = 'asset' - - if entity.entity_type in ('TypedContextList', 'AssetVersionList'): - entity_type = 'list' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - 
entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - # The custom attribute configuration is for the target entity type. - configurations.append(configuration) - elif ( - entity_type in ('asset', 'assetversion', 'show', 'task') and - configuration['project_id'] in (project_id, None) and - configuration['is_hierarchical'] - ): - # The target entity type allows hierarchical attributes. - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. - return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - -class StandardFactory(Factory): - '''Standard entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - if not bases: - bases = [] - - extra_bases = [] - # Customise classes. - if schema['id'] == 'ProjectSchema': - extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] - - elif schema['id'] == 'Location': - extra_bases = [ftrack_api.entity.location.Location] - - elif schema['id'] == 'AssetVersion': - extra_bases = [ftrack_api.entity.asset_version.AssetVersion] - - elif schema['id'].endswith('Component'): - extra_bases = [ftrack_api.entity.component.Component] - - elif schema['id'] == 'Note': - extra_bases = [ftrack_api.entity.note.Note] - - elif schema['id'] == 'Job': - extra_bases = [ftrack_api.entity.job.Job] - - elif schema['id'] == 'User': - extra_bases = [ftrack_api.entity.user.User] - - bases = extra_bases + bases - - # If bases does not contain any items, add the base entity class. - if not bases: - bases = [ftrack_api.entity.base.Entity] - - # Add mixins. 
- if 'notes' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.note.CreateNoteMixin - ) - - if 'thumbnail_id' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.component.CreateThumbnailMixin - ) - - cls = super(StandardFactory, self).create(schema, bases=bases) - - return cls - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - if reference == 'Metadata': - - def create_metadata(proxy, data, reference): - '''Return metadata for *data*.''' - entity = proxy.collection.entity - session = entity.session - data.update({ - 'parent_id': entity['id'], - 'parent_type': entity.entity_type - }) - return session.create(reference, data) - - creator = functools.partial( - create_metadata, reference=reference - ) - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - elif reference == 'CustomAttributeValue': - return ( - ftrack_api.attribute.CustomAttributeCollectionAttribute( - name, mutable=mutable - ) - ) - - elif reference.endswith('CustomAttributeValue'): - def creator(proxy, data): - '''Create a custom attribute based on *proxy* and *data*. - - Raise :py:exc:`KeyError` if related entity is already presisted - to the server. The proxy represents dense custom attribute - values and should never create new custom attribute values - through the proxy if entity exists on the remote. - - If the entity is not persisted the ususal - CustomAttributeValue items cannot be updated as - the related entity does not exist on remote and values not in - the proxy. Instead a CustomAttributeValue will - be reconstructed and an update operation will be recorded. 
- - ''' - entity = proxy.collection.entity - if ( - ftrack_api.inspection.state(entity) is not - ftrack_api.symbol.CREATED - ): - raise KeyError( - 'Custom attributes must be created explicitly for the ' - 'given entity type before being set.' - ) - - configuration = None - for candidate in _get_entity_configurations(entity): - if candidate['key'] == data['key']: - configuration = candidate - break - - if configuration is None: - raise ValueError( - u'No valid custom attribute for data {0!r} was found.' - .format(data) - ) - - create_data = dict(data.items()) - create_data['configuration_id'] = configuration['id'] - create_data['entity_id'] = entity['id'] - - session = entity.session - - # Create custom attribute by reconstructing it and update the - # value. This will prevent a create operation to be sent to the - # remote, as create operations for this entity type is not - # allowed. Instead an update operation will be recorded. - value = create_data.pop('value') - item = session.create( - reference, - create_data, - reconstructing=True - ) - - # Record update operation. 
- item['value'] = value - - return item - - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py deleted file mode 100644 index ae37922c515..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Job(ftrack_api.entity.base.Entity): - '''Represent job.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - To set a job `description` visible in the web interface, *data* can - contain a key called `data` which should be a JSON serialised - dictionary containing description:: - - data = { - 'status': 'running', - 'data': json.dumps(dict(description='My job description.')), - ... - } - - Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` - is set to something not equal to "api_job". - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - - if not reconstructing: - if data.get('type') not in ('api_job', None): - raise ValueError( - 'Invalid job type "{0}". 
Must be "api_job"'.format( - data.get('type') - ) - ) - - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py deleted file mode 100644 index 707f4fa6526..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py +++ /dev/null @@ -1,733 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections -import functools - -import ftrack_api.entity.base -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.symbol -import ftrack_api.inspection -from ftrack_api.logging import LazyLogMessage as L - - -class Location(ftrack_api.entity.base.Entity): - '''Represent storage for components.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
- - ''' - self.accessor = ftrack_api.symbol.NOT_SET - self.structure = ftrack_api.symbol.NOT_SET - self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET - self.priority = 95 - super(Location, self).__init__( - session, data=data, reconstructing=reconstructing - ) - - def __str__(self): - '''Return string representation of instance.''' - representation = super(Location, self).__str__() - - with self.session.auto_populating(False): - name = self['name'] - if name is not ftrack_api.symbol.NOT_SET: - representation = representation.replace( - '(', '("{0}", '.format(name) - ) - - return representation - - def add_component(self, component, source, recursive=True): - '''Add *component* to location. - - *component* should be a single component instance. - - *source* should be an instance of another location that acts as the - source. - - Raise :exc:`ftrack_api.ComponentInLocationError` if the *component* - already exists in this location. - - Raise :exc:`ftrack_api.LocationError` if managing data and the generated - target structure for the component already exists according to the - accessor. This helps prevent potential data loss by avoiding overwriting - existing data. Note that there is a race condition between the check and - the write so if another process creates data at the same target during - that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component registration. - - ''' - return self.add_components( - [component], sources=source, recursive=recursive - ) - - def add_components(self, components, sources, recursive=True, _depth=0): - '''Add *components* to location. - - *components* should be a list of component instances. - - *sources* may be either a single source or a list of sources. If a list - then each corresponding index in *sources* will be used for each - *component*. A source should be an instance of another location. 
- - Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any - component in *components* already exists in this location. In this case, - no changes will be made and no data transferred. - - Raise :exc:`ftrack_api.exception.LocationError` if managing data and the - generated target structure for the component already exists according to - the accessor. This helps prevent potential data loss by avoiding - overwriting existing data. Note that there is a race condition between - the check and the write so if another process creates data at the same - target during that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration. - - .. important:: - - If this location manages data then the *components* data is first - transferred to the target prescribed by the structure plugin, using - the configured accessor. If any component fails to transfer then - :exc:`ftrack_api.exception.LocationError` is raised and none of the - components are registered with the database. In this case it is left - up to the caller to decide and act on manually cleaning up any - transferred data using the 'transferred' detail in the raised error. - - Likewise, after transfer, all components are registered with the - database in a batch call. If any component causes an error then all - components will remain unregistered and - :exc:`ftrack_api.exception.LocationError` will be raised detailing - issues and any transferred data under the 'transferred' detail key. - - ''' - if ( - isinstance(sources, basestring) - or not isinstance(sources, collections.Sequence) - ): - sources = [sources] - - sources_count = len(sources) - if sources_count not in (1, len(components)): - raise ValueError( - 'sources must be either a single source or a sequence of ' - 'sources with indexes corresponding to passed components.' 
- ) - - if not self.structure: - raise ftrack_api.exception.LocationError( - 'No structure defined for location {location}.', - details=dict(location=self) - ) - - if not components: - # Optimisation: Return early when no components to process, such as - # when called recursively on an empty sequence component. - return - - indent = ' ' * (_depth + 1) - - # Check that components not already added to location. - existing_components = [] - try: - self.get_resource_identifiers(components) - - except ftrack_api.exception.ComponentNotInLocationError as error: - missing_component_ids = [ - missing_component['id'] - for missing_component in error.details['components'] - ] - for component in components: - if component['id'] not in missing_component_ids: - existing_components.append(component) - - else: - existing_components.extend(components) - - if existing_components: - # Some of the components already present in location. - raise ftrack_api.exception.ComponentInLocationError( - existing_components, self - ) - - # Attempt to transfer each component's data to this location. - transferred = [] - - for index, component in enumerate(components): - try: - # Determine appropriate source. - if sources_count == 1: - source = sources[0] - else: - source = sources[index] - - # Add members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.add_components( - component['members'], source, recursive=recursive, - _depth=(_depth + 1) - ) - - # Add component to this location. - context = self._get_context(component, source) - resource_identifier = self.structure.get_resource_identifier( - component, context - ) - - # Manage data transfer. 
- self._add_data(component, resource_identifier, source) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to transfer component {component} data to location ' - '{location} due to error:\n{indent}{error}\n{indent}' - 'Transferred component data that may require cleanup: ' - '{transferred}', - details=dict( - indent=indent, - component=component, - location=self, - error=error, - transferred=transferred - ) - ) - - else: - transferred.append((component, resource_identifier)) - - # Register all successfully transferred components. - components_to_register = [] - component_resource_identifiers = [] - - try: - for component, resource_identifier in transferred: - if self.resource_identifier_transformer: - # Optionally encode resource identifier before storing. - resource_identifier = ( - self.resource_identifier_transformer.encode( - resource_identifier, - context={'component': component} - ) - ) - - components_to_register.append(component) - component_resource_identifiers.append(resource_identifier) - - # Store component in location information. - self._register_components_in_location( - components, component_resource_identifiers - ) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to register components with location {location} due to ' - 'error:\n{indent}{error}\n{indent}Transferred component data ' - 'that may require cleanup: {transferred}', - details=dict( - indent=indent, - location=self, - error=error, - transferred=transferred - ) - ) - - # Publish events. 
- for component in components_to_register: - - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ), - ), - on_error='ignore' - ) - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - try: - source_resource_identifier = source.get_resource_identifier( - component - ) - except ftrack_api.exception.ComponentNotInLocationError: - pass - else: - context.update(dict( - source_resource_identifier=source_resource_identifier - )) - - return context - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - ''' - self.logger.debug(L( - 'Adding data for component {0!r} from source {1!r} to location ' - '{2!r} using resource identifier {3!r}.', - component, resource_identifier, source, self - )) - - # Read data from source and write to this location. - if not source.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for source location {location}.', - details=dict(location=source) - ) - - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for target location {location}.', - details=dict(location=self) - ) - - is_container = 'members' in component.keys() - if is_container: - # TODO: Improve this check. Possibly introduce an inspection - # such as ftrack_api.inspection.is_sequence_component. - if component.entity_type != 'SequenceComponent': - self.accessor.make_container(resource_identifier) - - else: - # Try to make container of component. 
- try: - container = self.accessor.get_container( - resource_identifier - ) - - except ftrack_api.exception.AccessorParentResourceNotFoundError: - # Container could not be retrieved from - # resource_identifier. Assume that there is no need to - # make the container. - pass - - else: - # No need for existence check as make_container does not - # recreate existing containers. - self.accessor.make_container(container) - - if self.accessor.exists(resource_identifier): - # Note: There is a race condition here in that the - # data may be added externally between the check for - # existence and the actual write which would still - # result in potential data loss. However, there is no - # good cross platform, cross accessor solution for this - # at present. - raise ftrack_api.exception.LocationError( - 'Cannot add component as data already exists and ' - 'overwriting could result in data loss. Computed ' - 'target resource identifier was: {0}' - .format(resource_identifier) - ) - - # Read and write data. - source_data = source.accessor.open( - source.get_resource_identifier(component), 'rb' - ) - target_data = self.accessor.open(resource_identifier, 'wb') - - # Read/write data in chunks to avoid reading all into memory at the - # same time. - chunked_read = functools.partial( - source_data.read, ftrack_api.symbol.CHUNK_SIZE - ) - for chunk in iter(chunked_read, ''): - target_data.write(chunk) - - target_data.close() - source_data.close() - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location against *resource_identifier*.''' - return self._register_components_in_location( - [component], [resource_identifier] - ) - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. 
- - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self.session.create( - 'ComponentLocation', data=dict( - component=component, - location=self, - resource_identifier=resource_identifier - ) - ) - - self.session.commit() - - def remove_component(self, component, recursive=True): - '''Remove *component* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component deregistration. - - ''' - return self.remove_components([component], recursive=recursive) - - def remove_components(self, components, recursive=True): - '''Remove *components* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components deregistration. - - ''' - for component in components: - # Check component is in this location - self.get_resource_identifier(component) - - # Remove members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.remove_components( - component['members'], recursive=recursive - ) - - # Remove data. - self._remove_data(component) - - # Remove metadata. - self._deregister_component_in_location(component) - - # Emit event. 
- component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ) - ), - on_error='ignore' - ) - - def _remove_data(self, component): - '''Remove data associated with *component*.''' - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for location {location}.', - details=dict(location=self) - ) - - try: - self.accessor.remove( - self.get_resource_identifier(component) - ) - except ftrack_api.exception.AccessorResourceNotFoundError: - # If accessor does not support detecting sequence paths then an - # AccessorResourceNotFoundError is raised. For now, if the - # component type is 'SequenceComponent' assume success. - if not component.entity_type == 'SequenceComponent': - raise - - def _deregister_component_in_location(self, component): - '''Deregister *component* from location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - # TODO: Use session.get for optimisation. - component_location = self.session.query( - 'ComponentLocation where component_id is {0} and location_id is ' - '{1}'.format(component_id, location_id) - )[0] - - self.session.delete(component_location) - - # TODO: Should auto-commit here be optional? - self.session.commit() - - def get_component_availability(self, component): - '''Return availability of *component* in this location as a float.''' - return self.session.get_component_availability( - component, locations=[self] - )[self['id']] - - def get_component_availabilities(self, components): - '''Return availabilities of *components* in this location. - - Return list of float values corresponding to each component. 
- - ''' - return [ - availability[self['id']] for availability in - self.session.get_component_availabilities( - components, locations=[self] - ) - ] - - def get_resource_identifier(self, component): - '''Return resource identifier for *component*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the - component is not present in this location. - - ''' - return self.get_resource_identifiers([component])[0] - - def get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. - - ''' - resource_identifiers = self._get_resource_identifiers(components) - - # Optionally decode resource identifier. - if self.resource_identifier_transformer: - for index, resource_identifier in enumerate(resource_identifiers): - resource_identifiers[index] = ( - self.resource_identifier_transformer.decode( - resource_identifier, - context={'component': components[index]} - ) - ) - - return resource_identifiers - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. 
- - ''' - component_ids_mapping = collections.OrderedDict() - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - component_ids_mapping[component_id] = component - - component_locations = self.session.query( - 'select component_id, resource_identifier from ComponentLocation ' - 'where location_id is {0} and component_id in ({1})' - .format( - ftrack_api.inspection.primary_key(self).values()[0], - ', '.join(component_ids_mapping.keys()) - ) - ) - - resource_identifiers_map = {} - for component_location in component_locations: - resource_identifiers_map[component_location['component_id']] = ( - component_location['resource_identifier'] - ) - - resource_identifiers = [] - missing = [] - for component_id, component in component_ids_mapping.items(): - if component_id not in resource_identifiers_map: - missing.append(component) - else: - resource_identifiers.append( - resource_identifiers_map[component_id] - ) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - def get_filesystem_path(self, component): - '''Return filesystem path for *component*.''' - return self.get_filesystem_paths([component])[0] - - def get_filesystem_paths(self, components): - '''Return filesystem paths for *components*.''' - resource_identifiers = self.get_resource_identifiers(components) - - filesystem_paths = [] - for resource_identifier in resource_identifiers: - filesystem_paths.append( - self.accessor.get_filesystem_path(resource_identifier) - ) - - return filesystem_paths - - def get_url(self, component): - '''Return url for *component*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *component* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - - return self.accessor.get_url(resource_identifier) - - -class MemoryLocationMixin(object): - '''Represent storage for components. - - Unlike a standard location, only store metadata for components in this - location in memory rather than persisting to the database. - - ''' - - @property - def _cache(self): - '''Return cache.''' - try: - cache = self.__cache - except AttributeError: - cache = self.__cache = {} - - return cache - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location with *resource_identifier*.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache[component_id] = resource_identifier - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. - - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self._register_component_in_location(component, resource_identifier) - - def _deregister_component_in_location(self, component): - '''Deregister *component* in location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache.pop(component_id) - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the referenced components are not present in this location. 
- - ''' - resource_identifiers = [] - missing = [] - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - resource_identifier = self._cache.get(component_id) - if resource_identifier is None: - missing.append(component) - else: - resource_identifiers.append(resource_identifier) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - -class UnmanagedLocationMixin(object): - '''Location that does not manage data.''' - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - Overridden to have no effect. - - ''' - return - - def _remove_data(self, component): - '''Remove data associated with *component*. - - Overridden to have no effect. - - ''' - return - - -class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): - '''Special origin location that expects sources as filepaths.''' - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - context.update(dict( - source_resource_identifier=source - )) - - return context - - -class ServerLocationMixin(object): - '''Location representing ftrack server. - - Adds convenience methods to location, specific to ftrack server. - ''' - def get_thumbnail_url(self, component, size=None): - '''Return thumbnail url for *component*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py deleted file mode 100644 index f5a9403728b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py +++ /dev/null @@ -1,105 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import warnings - -import ftrack_api.entity.base - - -class Note(ftrack_api.entity.base.Entity): - '''Represent a note.''' - - def create_reply( - self, content, author - ): - '''Create a reply with *content* and *author*. - - .. note:: - - This is a helper method. To create replies manually use the - standard :meth:`Session.create` method. - - ''' - reply = self.session.create( - 'Note', { - 'author': author, - 'content': content - } - ) - - self['replies'].append(reply) - - return reply - - -class CreateNoteMixin(object): - '''Mixin to add create_note method on entity class.''' - - def create_note( - self, content, author, recipients=None, category=None, labels=None - ): - '''Create note with *content*, *author*. - - NoteLabels can be set by including *labels*. - - Note category can be set by including *category*. - - *recipients* can be specified as a list of user or group instances. - - ''' - note_label_support = 'NoteLabel' in self.session.types - - if not labels: - labels = [] - - if labels and not note_label_support: - raise ValueError( - 'NoteLabel is not supported by the current server version.' - ) - - if category and labels: - raise ValueError( - 'Both category and labels cannot be set at the same time.' 
- ) - - if not recipients: - recipients = [] - - data = { - 'content': content, - 'author': author - } - - if category: - if note_label_support: - labels = [category] - warnings.warn( - 'category argument will be removed in an upcoming version, ' - 'please use labels instead.', - PendingDeprecationWarning - ) - else: - data['category_id'] = category['id'] - - note = self.session.create('Note', data) - - self['notes'].append(note) - - for resource in recipients: - recipient = self.session.create('Recipient', { - 'note_id': note['id'], - 'resource_id': resource['id'] - }) - - note['recipients'].append(recipient) - - for label in labels: - self.session.create( - 'NoteLabelLink', - { - 'label_id': label['id'], - 'note_id': note['id'] - } - ) - - return note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py deleted file mode 100644 index ec6db7c0196..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py +++ /dev/null @@ -1,94 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class ProjectSchema(ftrack_api.entity.base.Entity): - '''Class representing ProjectSchema.''' - - def get_statuses(self, schema, type_id=None): - '''Return statuses for *schema* and optional *type_id*. - - *type_id* is the id of the Type for a TypedContext and can be used to - get statuses where the workflow has been overridden. - - ''' - # Task has overrides and need to be handled separately. 
- if schema == 'Task': - if type_id is not None: - overrides = self['_overrides'] - for override in overrides: - if override['type_id'] == type_id: - return override['workflow_schema']['statuses'][:] - - return self['_task_workflow']['statuses'][:] - - elif schema == 'AssetVersion': - return self['_version_workflow']['statuses'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have statuses.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_status from SchemaStatus ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [ - schema_type['task_status'] for schema_type in result - ] - - raise ValueError( - 'No valid statuses were found for schema {0}.'.format(schema) - ) - - def get_types(self, schema): - '''Return types for *schema*.''' - # Task need to be handled separately. 
- if schema == 'Task': - return self['_task_type_schema']['types'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have types.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_type from SchemaType ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [schema_type['task_type'] for schema_type in result] - - raise ValueError( - 'No valid types were found for schema {0}.'.format(schema) - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py deleted file mode 100644 index 511ad4ba999..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py +++ /dev/null @@ -1,123 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import arrow - -import ftrack_api.entity.base -import ftrack_api.exception - - -class User(ftrack_api.entity.base.Entity): - '''Represent a user.''' - - def start_timer(self, context=None, comment='', name=None, force=False): - '''Start a timer for *context* and return it. - - *force* can be used to automatically stop an existing timer and create a - timelog for it. If you need to get access to the created timelog, use - :func:`stop_timer` instead. - - *comment* and *name* are optional but will be set on the timer. - - .. 
note:: - - This method will automatically commit the changes and if *force* is - False then it will fail with a - :class:`ftrack_api.exception.NotUniqueError` exception if a - timer is already running. - - ''' - if force: - try: - self.stop_timer() - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Failed to stop existing timer.') - - timer = self.session.create('Timer', { - 'user': self, - 'context': context, - 'name': name, - 'comment': comment - }) - - # Commit the new timer and try to catch any error that indicate another - # timelog already exists and inform the user about it. - try: - self.session.commit() - except ftrack_api.exception.ServerError as error: - if 'IntegrityError' in str(error): - raise ftrack_api.exception.NotUniqueError( - ('Failed to start a timelog for user with id: {0}, it is ' - 'likely that a timer is already running. Either use ' - 'force=True or stop the timer first.').format(self['id']) - ) - else: - # Reraise the error as it might be something unrelated. - raise - - return timer - - def stop_timer(self): - '''Stop the current timer and return a timelog created from it. - - If a timer is not running, a - :exc:`ftrack_api.exception.NoResultFoundError` exception will be - raised. - - .. note:: - - This method will automatically commit the changes. - - ''' - timer = self.session.query( - 'Timer where user_id = "{0}"'.format(self['id']) - ).one() - - # If the server is running in the same timezone as the local - # timezone, we remove the TZ offset to get the correct duration. - is_timezone_support_enabled = self.session.server_information.get( - 'is_timezone_support_enabled', None - ) - if is_timezone_support_enabled is None: - self.logger.warning( - 'Could not identify if server has timezone support enabled. ' - 'Will assume server is running in UTC.' 
- ) - is_timezone_support_enabled = True - - if is_timezone_support_enabled: - now = arrow.now() - else: - now = arrow.now().replace(tzinfo='utc') - - delta = now - timer['start'] - duration = delta.days * 24 * 60 * 60 + delta.seconds - - timelog = self.session.create('Timelog', { - 'user_id': timer['user_id'], - 'context_id': timer['context_id'], - 'comment': timer['comment'], - 'start': timer['start'], - 'duration': duration, - 'name': timer['name'] - }) - - self.session.delete(timer) - self.session.commit() - - return timelog - - def send_invite(self): - '''Send a invation email to the user''' - - self.session.send_user_invite( - self - ) - def reset_api_key(self): - '''Reset the users api key.''' - - response = self.session.reset_remote( - 'api_key', entity=self - ) - - return response['api_key'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py deleted file mode 100644 index 1aab07ed77a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py deleted file mode 100644 index b5fd57da784..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py +++ /dev/null @@ -1,85 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import uuid -import collections - - -class Event(collections.MutableMapping): - '''Represent a single event.''' - - def __init__(self, topic, id=None, data=None, sent=None, - source=None, target='', 
in_reply_to_event=None): - '''Initialise event. - - *topic* is the required topic for the event. It can use a dotted - notation to demarcate groupings. For example, 'ftrack.update'. - - *id* is the unique id for this event instance. It is primarily used when - replying to an event. If not supplied a default uuid based value will - be used. - - *data* refers to event specific data. It should be a mapping structure - and defaults to an empty dictionary if not supplied. - - *sent* is the timestamp the event is sent. It will be set automatically - as send time unless specified here. - - *source* is information about where the event originated. It should be - a mapping and include at least a unique id value under an 'id' key. If - not specified, senders usually populate the value automatically at - publish time. - - *target* can be an expression that targets this event. For example, - a reply event would target the event to the sender of the source event. - The expression will be tested against subscriber information only. - - *in_reply_to_event* is used when replying to an event and should contain - the unique id of the event being replied to. 
- - ''' - super(Event, self).__init__() - self._data = dict( - id=id or uuid.uuid4().hex, - data=data or {}, - topic=topic, - sent=sent, - source=source or {}, - target=target, - in_reply_to_event=in_reply_to_event - ) - self._stopped = False - - def stop(self): - '''Stop further processing of this event.''' - self._stopped = True - - def is_stopped(self): - '''Return whether event has been stopped.''' - return self._stopped - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py deleted file mode 100644 index 0535e4fd5f1..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py +++ /dev/null @@ -1,282 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from operator import eq, ne, ge, le, gt, lt - -from pyparsing import (Group, Word, CaselessKeyword, Forward, - FollowedBy, Suppress, oneOf, OneOrMore, Optional, - alphanums, quotedString, removeQuotes) - -import ftrack_api.exception - -# Do not enable packrat since it is not thread-safe and will result in parsing -# exceptions in a multi threaded environment. 
-# ParserElement.enablePackrat() - - -class Parser(object): - '''Parse string based expression into :class:`Expression` instance.''' - - def __init__(self): - '''Initialise parser.''' - self._operators = { - '=': eq, - '!=': ne, - '>=': ge, - '<=': le, - '>': gt, - '<': lt - } - self._parser = self._construct_parser() - super(Parser, self).__init__() - - def _construct_parser(self): - '''Construct and return parser.''' - field = Word(alphanums + '_.') - operator = oneOf(self._operators.keys()) - value = Word(alphanums + '-_,./*@+') - quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) - - condition = Group( - field + operator + (quoted_value | value) - )('condition') - - not_ = Optional(Suppress(CaselessKeyword('not')))('not') - and_ = Suppress(CaselessKeyword('and'))('and') - or_ = Suppress(CaselessKeyword('or'))('or') - - expression = Forward() - parenthesis = Suppress('(') + expression + Suppress(')') - previous = condition | parenthesis - - for conjunction in (not_, and_, or_): - current = Forward() - - if conjunction in (and_, or_): - conjunction_expression = ( - FollowedBy(previous + conjunction + previous) - + Group( - previous + OneOrMore(conjunction + previous) - )(conjunction.resultsName) - ) - - elif conjunction in (not_, ): - conjunction_expression = ( - FollowedBy(conjunction.expr + current) - + Group(conjunction + current)(conjunction.resultsName) - ) - - else: # pragma: no cover - raise ValueError('Unrecognised conjunction.') - - current <<= (conjunction_expression | previous) - previous = current - - expression <<= previous - return expression('expression') - - def parse(self, expression): - '''Parse string *expression* into :class:`Expression`. - - Raise :exc:`ftrack_api.exception.ParseError` if *expression* could - not be parsed. 
- - ''' - result = None - expression = expression.strip() - if expression: - try: - result = self._parser.parseString( - expression, parseAll=True - ) - except Exception as error: - raise ftrack_api.exception.ParseError( - 'Failed to parse: {0}. {1}'.format(expression, error) - ) - - return self._process(result) - - def _process(self, result): - '''Process *result* using appropriate method. - - Method called is determined by the name of the result. - - ''' - method_name = '_process_{0}'.format(result.getName()) - method = getattr(self, method_name) - return method(result) - - def _process_expression(self, result): - '''Process *result* as expression.''' - return self._process(result[0]) - - def _process_not(self, result): - '''Process *result* as NOT operation.''' - return Not(self._process(result[0])) - - def _process_and(self, result): - '''Process *result* as AND operation.''' - return All([self._process(entry) for entry in result]) - - def _process_or(self, result): - '''Process *result* as OR operation.''' - return Any([self._process(entry) for entry in result]) - - def _process_condition(self, result): - '''Process *result* as condition.''' - key, operator, value = result - return Condition(key, self._operators[operator], value) - - def _process_quoted_value(self, result): - '''Process *result* as quoted value.''' - return result - - -class Expression(object): - '''Represent a structured expression to test candidates against.''' - - def __str__(self): - '''Return string representation.''' - return '<{0}>'.format(self.__class__.__name__) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return True - - -class All(Expression): - '''Match candidate that matches all of the specified expressions. - - .. note:: - - If no expressions are supplied then will always match. 
- - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(All, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return all([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Any(Expression): - '''Match candidate that matches any of the specified expressions. - - .. note:: - - If no expressions are supplied then will never match. - - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(Any, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return any([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Not(Expression): - '''Negate expression.''' - - def __init__(self, expression): - '''Initialise with *expression* to negate.''' - self._expression = expression - super(Not, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, - self._expression - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return not self._expression.match(candidate) - - -class Condition(Expression): - '''Represent condition.''' - - def __init__(self, key, operator, value): - '''Initialise condition. - - *key* is the key to check on the data when matching. It can be a nested - key represented by dots. 
For example, 'data.eventType' would attempt to - match candidate['data']['eventType']. If the candidate is missing any - of the requested keys then the match fails immediately. - - *operator* is the operator function to use to perform the match between - the retrieved candidate value and the conditional *value*. - - If *value* is a string, it can use a wildcard '*' at the end to denote - that any values matching the substring portion are valid when matching - equality only. - - ''' - self._key = key - self._operator = operator - self._value = value - self._wildcard = '*' - self._operatorMapping = { - eq: '=', - ne: '!=', - ge: '>=', - le: '<=', - gt: '>', - lt: '<' - } - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}{2}{3}>'.format( - self.__class__.__name__, - self._key, - self._operatorMapping.get(self._operator, self._operator), - self._value - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - key_parts = self._key.split('.') - - try: - value = candidate - for keyPart in key_parts: - value = value[keyPart] - except (KeyError, TypeError): - return False - - if ( - self._operator is eq - and isinstance(self._value, basestring) - and self._value[-1] == self._wildcard - ): - return self._value[:-1] in value - else: - return self._operator(value, self._value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py deleted file mode 100644 index 9f4ba80c6ef..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py +++ /dev/null @@ -1,1091 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from __future__ import absolute_import - -import collections -import urlparse -import threading -import Queue as queue -import logging -import time -import 
uuid -import operator -import functools -import json -import socket -import warnings - -import requests -import requests.exceptions -import websocket - -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.event.subscriber -import ftrack_api.event.expression -from ftrack_api.logging import LazyLogMessage as L - - -SocketIoSession = collections.namedtuple('SocketIoSession', [ - 'id', - 'heartbeatTimeout', - 'supportedTransports', -]) - - -ServerDetails = collections.namedtuple('ServerDetails', [ - 'scheme', - 'hostname', - 'port', -]) - - - - -class EventHub(object): - '''Manage routing of events.''' - - _future_signature_warning = ( - 'When constructing your Session object you did not explicitly define ' - 'auto_connect_event_hub as True even though you appear to be publishing ' - 'and / or subscribing to asynchronous events. In version version 2.0 of ' - 'the ftrack-python-api the default behavior will change from True ' - 'to False. Please make sure to update your tools. You can read more at ' - 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html' - ) - - def __init__(self, server_url, api_user, api_key): - '''Initialise hub, connecting to ftrack *server_url*. - - *api_user* is the user to authenticate as and *api_key* is the API key - to authenticate with. - - ''' - super(EventHub, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.id = uuid.uuid4().hex - self._connection = None - - self._unique_packet_id = 0 - self._packet_callbacks = {} - self._lock = threading.RLock() - - self._wait_timeout = 4 - - self._subscribers = [] - self._reply_callbacks = {} - self._intentional_disconnect = False - - self._event_queue = queue.Queue() - self._event_namespace = 'ftrack.event' - self._expression_parser = ftrack_api.event.expression.Parser() - - # Default values for auto reconnection timeout on unintentional - # disconnection. Equates to 5 minutes. 
- self._auto_reconnect_attempts = 30 - self._auto_reconnect_delay = 10 - - self._deprecation_warning_auto_connect = False - - # Mapping of Socket.IO codes to meaning. - self._code_name_mapping = { - '0': 'disconnect', - '1': 'connect', - '2': 'heartbeat', - '3': 'message', - '4': 'json', - '5': 'event', - '6': 'acknowledge', - '7': 'error' - } - self._code_name_mapping.update( - dict((name, code) for code, name in self._code_name_mapping.items()) - ) - - self._server_url = server_url - self._api_user = api_user - self._api_key = api_key - - # Parse server URL and store server details. - url_parse_result = urlparse.urlparse(self._server_url) - if not url_parse_result.scheme: - raise ValueError('Could not determine scheme from server url.') - - if not url_parse_result.hostname: - raise ValueError('Could not determine hostname from server url.') - - self.server = ServerDetails( - url_parse_result.scheme, - url_parse_result.hostname, - url_parse_result.port - ) - - def get_server_url(self): - '''Return URL to server.''' - return '{0}://{1}'.format( - self.server.scheme, self.get_network_location() - ) - - def get_network_location(self): - '''Return network location part of url (hostname with optional port).''' - if self.server.port: - return '{0}:{1}'.format(self.server.hostname, self.server.port) - else: - return self.server.hostname - - @property - def secure(self): - '''Return whether secure connection used.''' - return self.server.scheme == 'https' - - def connect(self): - '''Initialise connection to server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already - connected or connection fails. - - ''' - - self._deprecation_warning_auto_connect = False - - if self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Already connected.' - ) - - # Reset flag tracking whether disconnection was intentional. - self._intentional_disconnect = False - - try: - # Connect to socket.io server using websocket transport. 
- session = self._get_socket_io_session() - - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) - - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( - scheme, self.get_network_location(), session.id - ) - - # timeout is set to 60 seconds to avoid the issue where the socket - # ends up in a bad state where it is reported as connected but the - # connection has been closed. The issue happens often when connected - # to a secure socket and the computer goes to sleep. - # More information on how the timeout works can be found here: - # https://docs.python.org/2/library/socket.html#socket.socket.setblocking - self._connection = websocket.create_connection(url, timeout=60) - - except Exception as error: - error_message = ( - 'Failed to connect to event server at {server_url} with ' - 'error: "{error}".' - ) - - error_details = { - 'error': unicode(error), - 'server_url': self.get_server_url() - } - - self.logger.debug( - L( - error_message, **error_details - ), - exc_info=1 - ) - raise ftrack_api.exception.EventHubConnectionError( - error_message, - details=error_details - ) - - # Start background processing thread. - self._processor_thread = _ProcessorThread(self) - self._processor_thread.start() - - # Subscribe to reply events if not already. Note: Only adding the - # subscriber locally as the following block will notify server of all - # existing subscribers, which would cause the server to report a - # duplicate subscriber error if EventHub.subscribe was called here. - try: - self._add_subscriber( - 'topic=ftrack.meta.reply', - self._handle_reply, - subscriber=dict( - id=self.id - ) - ) - except ftrack_api.exception.NotUniqueError: - pass - - # Now resubscribe any existing stored subscribers. This can happen when - # reconnecting automatically for example. 
- for subscriber in self._subscribers[:]: - self._notify_server_about_subscriber(subscriber) - - @property - def connected(self): - '''Return if connected.''' - return self._connection is not None and self._connection.connected - - def disconnect(self, unsubscribe=True): - '''Disconnect from server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - If *unsubscribe* is True then unsubscribe all current subscribers - automatically before disconnecting. - - ''' - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Not currently connected.' - ) - - else: - # Set flag to indicate disconnection was intentional. - self._intentional_disconnect = True - - # Set blocking to true on socket to make sure unsubscribe events - # are emitted before closing the connection. - self._connection.sock.setblocking(1) - - # Unsubscribe all subscribers. - if unsubscribe: - for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) - - # Now disconnect. - self._connection.close() - self._connection = None - - # Shutdown background processing thread. - self._processor_thread.cancel() - - # Join to it if it is not current thread to help ensure a clean - # shutdown. - if threading.current_thread() != self._processor_thread: - self._processor_thread.join(self._wait_timeout) - - def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. - - Make *attempts* number of attempts with *delay* in seconds between each - attempt. - - .. note:: - - All current subscribers will be automatically resubscribed after - successful reconnection. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to - reconnect. 
- - ''' - try: - self.disconnect(unsubscribe=False) - except ftrack_api.exception.EventHubConnectionError: - pass - - for attempt in range(attempts): - self.logger.debug(L( - 'Reconnect attempt {0} of {1}', attempt, attempts - )) - - # Silence logging temporarily to avoid lots of failed connection - # related information. - try: - logging.disable(logging.CRITICAL) - - try: - self.connect() - except ftrack_api.exception.EventHubConnectionError: - time.sleep(delay) - else: - break - - finally: - logging.disable(logging.NOTSET) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to reconnect to event server at {0} after {1} attempts.' - .format(self.get_server_url(), attempts) - ) - - def wait(self, duration=None): - '''Wait for events and handle as they arrive. - - If *duration* is specified, then only process events until duration is - reached. *duration* is in seconds though float values can be used for - smaller values. - - ''' - started = time.time() - - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - pass - else: - self._handle(event) - - # Additional special processing of events. - if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def get_subscriber_by_identifier(self, identifier): - '''Return subscriber with matching *identifier*. - - Return None if no subscriber with *identifier* found. - - ''' - for subscriber in self._subscribers[:]: - if subscriber.metadata.get('id') == identifier: - return subscriber - - return None - - def subscribe(self, subscription, callback, subscriber=None, priority=100): - '''Register *callback* for *subscription*. - - A *subscription* is a string that can specify in detail which events the - callback should receive. The filtering is applied against each event - object. Nested references are supported using '.' separators. 
- For example, 'topic=foo and data.eventType=Shot' would match the - following event:: - - - - The *callback* should accept an instance of - :class:`ftrack_api.event.base.Event` as its sole argument. - - Callbacks are called in order of *priority*. The lower the priority - number the sooner it will be called, with 0 being the first. The - default priority is 100. Note that priority only applies against other - callbacks registered with this hub and not as a global priority. - - An earlier callback can prevent processing of subsequent callbacks by - calling :meth:`Event.stop` on the passed `event` before - returning. - - .. warning:: - - Handlers block processing of other received events. For long - running callbacks it is advisable to delegate the main work to - another process or thread. - - A *callback* can be attached to *subscriber* information that details - the subscriber context. A subscriber context will be generated - automatically if not supplied. - - .. note:: - - The subscription will be stored locally, but until the server - receives notification of the subscription it is possible the - callback will not be called. - - Return subscriber identifier. - - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - # Add subscriber locally. - subscriber = self._add_subscriber( - subscription, callback, subscriber, priority - ) - - # Notify server now if possible. - try: - self._notify_server_about_subscriber(subscriber) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) - - return subscriber.metadata['id'] - - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. - - See :meth:`subscribe` for argument descriptions. - - Return :class:`ftrack_api.event.subscriber.Subscriber` instance. 
- - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - if subscriber is None: - subscriber = {} - - subscriber.setdefault('id', uuid.uuid4().hex) - - # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) - - if existing_subscriber is not None: - raise ftrack_api.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' - .format(subscriber['id']) - ) - - subscriber = ftrack_api.event.subscriber.Subscriber( - subscription=subscription, - callback=callback, - metadata=subscriber, - priority=priority - ) - - self._subscribers.append(subscriber) - - return subscriber - - def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' - subscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.subscribe', - data=dict( - subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) - ) - - self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) - ) - - def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. - - .. note:: - - If the server is not reachable then it won't be notified of the - unsubscription. However, the subscriber will be removed locally - regardless. 
- - ''' - subscriber = self.get_subscriber_by_identifier(subscriber_identifier) - - if subscriber is None: - raise ftrack_api.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) - ) - - self._subscribers.pop(self._subscribers.index(subscriber)) - - # Notify the server if possible. - unsubscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) - ) - - try: - self._publish( - unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) - - def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) - - def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. - - Modify *event*, setting appropriate values to target event correctly as - a reply. - - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] - if source is not None: - event['source'] = source - - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. 
note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. - - .. note:: - - Will not be called when *synchronous* is True. - - If *on_error* is set to 'ignore' then errors raised during publish of - event will be caught by this method and ignored. - - ''' - if self._deprecation_warning_auto_connect and not synchronous: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) - except Exception: - if on_error == 'ignore': - pass - else: - raise - - def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. - - If *source* is specified it will be used for the source value of the - sent event. - - ''' - reply_event = ftrack_api.event.base.Event( - 'ftrack.meta.reply', - data=data - ) - self._prepare_reply_event(reply_event, source_event, source=source) - self.publish(reply_event) - - def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - A *callback* can also be specified. This callback will be called once - the server acknowledges receipt of the sent event. A default callback - that checks for errors from the server will be used if not specified. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. 
Note that there is no - guarantee that a reply will be sent. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - ''' - # Prepare event adding any relevant additional information. - self._prepare_event(event) - - if synchronous: - # Bypass emitting event to server and instead call locally - # registered handlers directly, collecting and returning results. - return self._handle(event, synchronous=synchronous) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' - ) - - # Use standard callback if none specified. - if callback is None: - callback = functools.partial(self._on_published, event) - - # Emit event to central server for asynchronous processing. - try: - # Register on reply callback if specified. - if on_reply is not None: - # TODO: Add cleanup process that runs after a set duration to - # garbage collect old reply callbacks and prevent dictionary - # growing too large. - self._reply_callbacks[event['id']] = on_reply - - try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except ftrack_api.exception.EventHubConnectionError: - # Connection may have dropped temporarily. Wait a few moments to - # see if background thread reconnects automatically. - time.sleep(15) - - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except: - raise - - except Exception: - # Failure to send event should not cause caller to fail. - # TODO: This behaviour is inconsistent with the failing earlier on - # lack of connection and also with the error handling parameter of - # EventHub.publish. Consider refactoring. 
- self.logger.exception(L('Error sending event {0}.', event)) - - def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) - - def _handle(self, event, synchronous=False): - '''Handle *event*. - - If *synchronous* is True, do not send any automatic reply events. - - ''' - # Sort by priority, lower is higher. - # TODO: Use a sorted list to avoid sorting each time in order to improve - # performance. - subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) - - results = [] - - target = event.get('target', None) - target_expression = None - if target: - try: - target_expression = self._expression_parser.parse(target) - except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) - return - - for subscriber in subscribers: - # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) - ): - continue - - # Check if subscriber interested in the event. - if not subscriber.interested_in(event): - continue - - response = None - - try: - response = subscriber.callback(event) - results.append(response) - except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) - - # Automatically publish a non None response as a reply when not in - # synchronous mode. 
- if not synchronous: - if self._deprecation_warning_auto_connect: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - if response is not None: - try: - self.publish_reply( - event, data=response, source=subscriber.metadata - ) - - except Exception: - self.logger.exception(L( - 'Error publishing response {0} from subscriber {1} ' - 'for event {2}.', response, subscriber, event - )) - - # Check whether to continue processing topic event. - if event.is_stopped(): - self.logger.debug(L( - 'Subscriber {0} stopped event {1}. Will not process ' - 'subsequent subscriber callbacks for this event.', - subscriber, event - )) - break - - return results - - def _handle_reply(self, event): - '''Handle reply *event*, passing it to any registered callback.''' - callback = self._reply_callbacks.get(event['in_reply_to_event'], None) - if callback is not None: - callback(event) - - def subscription(self, subscription, callback, subscriber=None, - priority=100): - '''Return context manager with *callback* subscribed to *subscription*. - - The subscribed callback will be automatically unsubscribed on exit - of the context manager. - - ''' - return _SubscriptionContext( - self, subscription, callback, subscriber=subscriber, - priority=priority, - ) - - # Socket.IO interface. - # - - def _get_socket_io_session(self): - '''Connect to server and retrieve session information.''' - socket_io_url = ( - '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}' - ).format( - self.server.scheme, - self.get_network_location(), - self._api_user, - self._api_key - ) - try: - response = requests.get( - socket_io_url, - timeout=60 # 60 seconds timeout to recieve errors faster. 
- ) - except requests.exceptions.Timeout as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Timed out connecting to server: {0}.'.format(error) - ) - except requests.exceptions.SSLError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to negotiate SSL with server: {0}.'.format(error) - ) - except requests.exceptions.ConnectionError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to connect to server: {0}.'.format(error) - ) - else: - status = response.status_code - if status != 200: - raise ftrack_api.exception.EventHubConnectionError( - 'Received unexpected status code {0}.'.format(status) - ) - - # Parse result and return session information. - parts = response.text.split(':') - return SocketIoSession( - parts[0], - parts[1], - parts[3].split(',') - ) - - def _add_packet_callback(self, callback): - '''Store callback against a new unique packet ID. - - Return the unique packet ID. - - ''' - with self._lock: - self._unique_packet_id += 1 - unique_identifier = self._unique_packet_id - - self._packet_callbacks[unique_identifier] = callback - - return '{0}+'.format(unique_identifier) - - def _pop_packet_callback(self, packet_identifier): - '''Pop and return callback for *packet_identifier*.''' - return self._packet_callbacks.pop(packet_identifier) - - def _emit_event_packet(self, namespace, event, callback): - '''Send *event* packet under *namespace*.''' - data = self._encode( - dict(name=namespace, args=[event]) - ) - self._send_packet( - self._code_name_mapping['event'], data=data, callback=callback - ) - - def _acknowledge_packet(self, packet_identifier, *args): - '''Send acknowledgement of packet with *packet_identifier*.''' - packet_identifier = packet_identifier.rstrip('+') - data = str(packet_identifier) - if args: - data += '+{1}'.format(self._encode(args)) - - self._send_packet(self._code_name_mapping['acknowledge'], data=data) - - def _send_packet(self, code, data='', callback=None): - 
'''Send packet via connection.''' - path = '' - packet_identifier = ( - self._add_packet_callback(callback) if callback else '' - ) - packet_parts = (str(code), packet_identifier, path, data) - packet = ':'.join(packet_parts) - - try: - self._connection.send(packet) - self.logger.debug(L(u'Sent packet: {0}', packet)) - except socket.error as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to send packet: {0}'.format(error) - ) - - def _receive_packet(self): - '''Receive and return packet via connection.''' - try: - packet = self._connection.recv() - except Exception as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Error receiving packet: {0}'.format(error) - ) - - try: - parts = packet.split(':', 3) - except AttributeError: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - code, packet_identifier, path, data = None, None, None, None - - count = len(parts) - if count == 4: - code, packet_identifier, path, data = parts - elif count == 3: - code, packet_identifier, path = parts - elif count == 1: - code = parts[0] - else: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - self.logger.debug(L('Received packet: {0}', packet)) - return code, packet_identifier, path, data - - def _handle_packet(self, code, packet_identifier, path, data): - '''Handle packet received from server.''' - code_name = self._code_name_mapping[code] - - if code_name == 'connect': - self.logger.debug('Connected to event server.') - event = ftrack_api.event.base.Event('ftrack.meta.connected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'disconnect': - self.logger.debug('Disconnected from event server.') - if not self._intentional_disconnect: - self.logger.debug( - 'Disconnected unexpectedly. Attempting to reconnect.' 
- ) - try: - self.reconnect( - attempts=self._auto_reconnect_attempts, - delay=self._auto_reconnect_delay - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug('Failed to reconnect automatically.') - else: - self.logger.debug('Reconnected successfully.') - - if not self.connected: - event = ftrack_api.event.base.Event('ftrack.meta.disconnected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'heartbeat': - # Reply with heartbeat. - self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) - - elif code_name == 'event': - payload = self._decode(data) - args = payload.get('args', []) - - if len(args) == 1: - event_payload = args[0] - if isinstance(event_payload, collections.Mapping): - try: - event = ftrack_api.event.base.Event(**event_payload) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) - return - - self._event_queue.put(event) - - elif code_name == 'acknowledge': - parts = data.split('+', 1) - acknowledged_packet_identifier = int(parts[0]) - args = [] - if len(parts) == 2: - args = self._decode(parts[1]) - - try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) - except KeyError: - pass - else: - callback(*args) - - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) - - else: - self.logger.debug(L('{0}: {1}', code_name, data)) - - def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) - - def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' - if isinstance(item, ftrack_api.event.base.Event): - # Convert to dictionary for encoding. - item = dict(**item) - - if 'in_reply_to_event' in item: - # Convert keys to server convention. 
- item['inReplyToEvent'] = item.pop('in_reply_to_event') - - return item - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' - return json.loads(string, object_hook=self._decode_object_hook) - - def _decode_object_hook(self, item): - '''Return *item* transformed.''' - if isinstance(item, collections.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') - - return item - - -class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' - - def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' - self._hub = hub - self._subscription = subscription - self._callback = callback - self._subscriber = subscriber - self._priority = priority - self._subscriberIdentifier = None - - def __enter__(self): - '''Enter context subscribing callback to topic.''' - self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority - ) - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' - self._hub.unsubscribe(self._subscriberIdentifier) - - -class _ProcessorThread(threading.Thread): - '''Process messages from server.''' - - daemon = True - - def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' - super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.client = client - self.done = threading.Event() - - def run(self): - '''Perform work in thread.''' - while not self.done.is_set(): - try: - code, packet_identifier, path, data = self.client._receive_packet() - self.client._handle_packet(code, packet_identifier, path, data) - - except ftrack_api.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) - continue - - except ftrack_api.exception.EventHubConnectionError: - self.cancel() - - # Fake a disconnection event in order to trigger reconnection - # when necessary. - self.client._handle_packet('0', '', '', '') - - break - - except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) - self.cancel() - break - - def cancel(self): - '''Cancel work as soon as possible.''' - self.done.set() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py deleted file mode 100644 index 0d38463aaf7..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py +++ /dev/null @@ -1,27 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.subscription - - -class Subscriber(object): - '''Represent event subscriber.''' - - def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api.event.subscription.Subscription( - subscription - ) - self.callback = callback - self.metadata = metadata - self.priority = priority - - def __str__(self): - '''Return string representation.''' - return '<{0} metadata={1} subscription="{2}">'.format( - self.__class__.__name__, self.metadata, self.subscription - ) - - def interested_in(self, event): - '''Return whether subscriber interested in 
*event*.''' - return self.subscription.includes(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py deleted file mode 100644 index 0b208d9977c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py +++ /dev/null @@ -1,23 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.expression - - -class Subscription(object): - '''Represent a subscription.''' - - parser = ftrack_api.event.expression.Parser() - - def __init__(self, subscription): - '''Initialise with *subscription*.''' - self._subscription = subscription - self._expression = self.parser.parse(subscription) - - def __str__(self): - '''Return string representation.''' - return self._subscription - - def includes(self, event): - '''Return whether subscription includes *event*.''' - return self._expression.match(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py deleted file mode 100644 index 8a2eb9bc041..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py +++ /dev/null @@ -1,392 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import traceback - -import ftrack_api.entity.base - - -class Error(Exception): - '''ftrack specific error.''' - - default_message = 'Unspecified error occurred.' - - def __init__(self, message=None, details=None): - '''Initialise exception with *message*. - - If *message* is None, the class 'default_message' will be used. 
- - *details* should be a mapping of extra information that can be used in - the message and also to provide more context. - - ''' - if message is None: - message = self.default_message - - self.message = message - self.details = details - if self.details is None: - self.details = {} - - self.traceback = traceback.format_exc() - - def __str__(self): - '''Return string representation.''' - keys = {} - for key, value in self.details.iteritems(): - if isinstance(value, unicode): - value = value.encode(sys.getfilesystemencoding()) - keys[key] = value - - return str(self.message.format(**keys)) - - -class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' - - default_message = 'Authentication error.' - - -class ServerError(Error): - '''Raise when the server reports an error.''' - - default_message = 'Server reported error processing request.' - - -class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' - - default_message = 'Server incompatible.' - - -class NotFoundError(Error): - '''Raise when something that should exist is not found.''' - - default_message = 'Not found.' - - -class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' - - default_message = 'Non-unique value detected.' - - -class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' - - default_message = 'Incorrect result detected.' - - -class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' - - default_message = 'Expected result, but no result was found.' - - -class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' - - default_message = 'Expected single result, but received multiple results.' - - -class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' - - default_message = 'Entity type error.' 
- - -class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' - - default_message = 'Entity type "{entity_type}" not recognised.' - - def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) - super(UnrecognisedEntityTypeError, self).__init__(**kw) - - -class OperationError(Error): - '''Raise when an operation error occurs.''' - - default_message = 'Operation error.' - - -class InvalidStateError(Error): - '''Raise when an invalid state detected.''' - - default_message = 'Invalid state.' - - -class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' - - default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' - ) - - def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) - super(InvalidStateTransitionError, self).__init__(**kw) - - -class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' - - default_message = 'Attribute error.' - - -class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' - - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' - ) - - def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) - super(ImmutableAttributeError, self).__init__(**kw) - - -class CollectionError(Error): - '''Raise when an error related to collections occurs.''' - - default_message = 'Collection error.' 
- - def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) - super(CollectionError, self).__init__(**kw) - - -class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' - - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) - - -class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' - - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) - - def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) - super(DuplicateItemInCollectionError, self).__init__(collection, **kw) - - -class ParseError(Error): - '''Raise when a parsing error occurs.''' - - default_message = 'Failed to parse.' - - -class EventHubError(Error): - '''Raise when issues related to event hub occur.''' - - default_message = 'Event hub error occurred.' - - -class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' - - default_message = 'Event hub is not connected.' - - -class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' - - default_message = 'Invalid packet.' - - -class PermissionDeniedError(Error): - '''Raise when permission is denied.''' - - default_message = 'Permission denied.' - - -class LocationError(Error): - '''Base for errors associated with locations.''' - - default_message = 'Unspecified location error' - - -class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' - - default_message = 'Component not available in any location.' 
- - -class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' - - default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentNotInLocationError, self).__init__(**kw) - - -class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' - - default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentInLocationError, self).__init__(**kw) - - -class AccessorError(Error): - '''Base for errors associated with accessors.''' - - default_message = 'Unspecified accessor error' - - -class AccessorOperationFailedError(AccessorError): - '''Base for failed operations on accessors.''' - - default_message = 'Operation {operation} failed: {error}' - - def __init__( - self, operation='', resource_identifier=None, error=None, **kw - ): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier, - error=error - )) - super(AccessorOperationFailedError, self).__init__(**kw) - - -class AccessorUnsupportedOperationError(AccessorOperationFailedError): - '''Raise when operation is unsupported.''' - - 
default_message = 'Operation {operation} unsupported.' - - -class AccessorPermissionDeniedError(AccessorOperationFailedError): - '''Raise when permission denied.''' - - default_message = ( - 'Cannot {operation} {resource_identifier}. Permission denied.' - ) - - -class AccessorResourceIdentifierError(AccessorError): - '''Raise when a error related to a resource_identifier occurs.''' - - default_message = 'Resource identifier is invalid: {resource_identifier}.' - - def __init__(self, resource_identifier, **kw): - kw.setdefault('details', {}).update(dict( - resource_identifier=resource_identifier - )) - super(AccessorResourceIdentifierError, self).__init__(**kw) - - -class AccessorFilesystemPathError(AccessorResourceIdentifierError): - '''Raise when a error related to an accessor filesystem path occurs.''' - - default_message = ( - 'Could not determine filesystem path from resource identifier: ' - '{resource_identifier}.' - ) - - -class AccessorResourceError(AccessorError): - '''Base for errors associated with specific resource.''' - - default_message = 'Unspecified resource error: {resource_identifier}' - - def __init__(self, operation='', resource_identifier=None, error=None, - **kw): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier - )) - super(AccessorResourceError, self).__init__(**kw) - - -class AccessorResourceNotFoundError(AccessorResourceError): - '''Raise when a required resource is not found.''' - - default_message = 'Resource not found: {resource_identifier}' - - -class AccessorParentResourceNotFoundError(AccessorResourceError): - '''Raise when a parent resource (such as directory) is not found.''' - - default_message = 'Parent resource is missing: {resource_identifier}' - - -class AccessorResourceInvalidError(AccessorResourceError): - '''Raise when a resource is not the right type.''' - - default_message = 'Resource invalid: {resource_identifier}' - - -class 
AccessorContainerNotEmptyError(AccessorResourceError): - '''Raise when container is not empty.''' - - default_message = 'Container is not empty: {resource_identifier}' - - -class StructureError(Error): - '''Base for errors associated with structures.''' - - default_message = 'Unspecified structure error' - - -class ConnectionClosedError(Error): - '''Raise when attempt to use closed connection detected.''' - - default_message = "Connection closed." diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py deleted file mode 100644 index c282fcc8141..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py +++ /dev/null @@ -1,131 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import termcolor - -import ftrack_api.entity.base -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection - - -#: Useful filters to pass to :func:`format`.` -FILTER = { - 'ignore_unset': ( - lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET - ) -} - - -def format( - entity, formatters=None, attribute_filter=None, recursive=False, - indent=0, indent_first_line=True, _seen=None -): - '''Return formatted string representing *entity*. - - *formatters* can be used to customise formatting of elements. It should be a - mapping with one or more of the following keys: - - * header - Used to format entity type. - * label - Used to format attribute names. - - Specify an *attribute_filter* to control which attributes to include. By - default all attributes are included. The *attribute_filter* should be a - callable that accepts `(entity, attribute_name, attribute_value)` and - returns True if the attribute should be included in the output. 
For example, - to filter out all unset values:: - - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - - If *recursive* is True then recurse into Collections and format each entity - present. - - *indent* specifies the overall indentation in spaces of the formatted text, - whilst *indent_first_line* determines whether to apply that indent to the - first generated line. - - .. warning:: - - Iterates over all *entity* attributes which may cause multiple queries - to the server. Turn off auto populating in the session to prevent this. - - ''' - # Initialise default formatters. - if formatters is None: - formatters = dict() - - formatters.setdefault( - 'header', lambda text: termcolor.colored( - text, 'white', 'on_blue', attrs=['bold'] - ) - ) - formatters.setdefault( - 'label', lambda text: termcolor.colored( - text, 'blue', attrs=['bold'] - ) - ) - - # Determine indents. - spacer = ' ' * indent - if indent_first_line: - first_line_spacer = spacer - else: - first_line_spacer = '' - - # Avoid infinite recursion on circular references. 
- if _seen is None: - _seen = set() - - identifier = str(ftrack_api.inspection.identity(entity)) - if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) - - _seen.add(identifier) - information = list() - - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) - for key, value in sorted(entity.items()): - if attribute_filter is not None: - if not attribute_filter(entity, key, value): - continue - - child_indent = indent + len(key) + 3 - - if isinstance(value, ftrack_api.entity.base.Entity): - value = format( - value, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=False, - _seen=_seen.copy() - ) - - if isinstance(value, ftrack_api.collection.Collection): - if recursive: - child_values = [] - for index, child in enumerate(value): - child_value = format( - child, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=index != 0, - _seen=_seen.copy() - ) - child_values.append(child_value) - - value = '\n'.join(child_values) - - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) - - return '\n'.join(information) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py deleted file mode 100644 index d8b815200ec..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections - -import ftrack_api.symbol -import ftrack_api.operation - - -def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - 
primary_key(entity).values() - ) - - -def primary_key(entity): - '''Return primary key of *entity* as an ordered mapping of {field: value}. - - To get just the primary key values:: - - primary_key(entity).values() - - ''' - primary_key = collections.OrderedDict() - for name in entity.primary_key_attributes: - value = entity[name] - if value is ftrack_api.symbol.NOT_SET: - raise KeyError( - 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) - ) - - primary_key[str(name)] = str(value) - - return primary_key - - -def _state(operation, state): - '''Return state following *operation* against current *state*.''' - if ( - isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.CREATED - - elif ( - isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.MODIFIED - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - state = ftrack_api.symbol.DELETED - - return state - - -def state(entity): - '''Return current *entity* state. - - .. seealso:: :func:`ftrack_api.inspection.states`. - - ''' - value = ftrack_api.symbol.NOT_SET - - for operation in entity.session.recorded_operations: - # Determine if operation refers to an entity and whether that entity - # is *entity*. - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - and operation.entity_type == entity.entity_type - and operation.entity_key == primary_key(entity) - ): - value = _state(operation, value) - - return value - - -def states(entities): - '''Return current states of *entities*. - - An optimised function for determining states of multiple entities in one - go. - - .. note:: - - All *entities* should belong to the same session. - - .. 
seealso:: :func:`ftrack_api.inspection.state`. - - ''' - if not entities: - return [] - - session = entities[0].session - - entities_by_identity = collections.OrderedDict() - for entity in entities: - key = (entity.entity_type, str(primary_key(entity).values())) - entities_by_identity[key] = ftrack_api.symbol.NOT_SET - - for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - ): - key = (operation.entity_type, str(operation.entity_key.values())) - if key not in entities_by_identity: - continue - - value = _state(operation, entities_by_identity[key]) - entities_by_identity[key] = value - - return entities_by_identity.values() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py deleted file mode 100644 index 41969c5b2a8..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py +++ /dev/null @@ -1,43 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import functools -import warnings - - -def deprecation_warning(message): - def decorator(function): - @functools.wraps(function) - def wrapper(*args, **kwargs): - warnings.warn( - message, - PendingDeprecationWarning - ) - return function(*args, **kwargs) - return wrapper - - return decorator - - -class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. - - Example:: - - # Formatting of string will not occur unless debug logging enabled. 
- logger.debug(LazyLogMessage( - 'Hello {0}', 'world' - )) - - ''' - - def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' - self.message = message - self.args = args - self.kwargs = kwargs - - def __str__(self): - '''Return string representation.''' - return self.message.format(*self.args, **self.kwargs) - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py deleted file mode 100644 index bb3bb4ee2c9..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy - - -class Operations(object): - '''Stack of operations.''' - - def __init__(self): - '''Initialise stack.''' - self._stack = [] - super(Operations, self).__init__() - - def clear(self): - '''Clear all operations.''' - del self._stack[:] - - def push(self, operation): - '''Push *operation* onto stack.''' - self._stack.append(operation) - - def pop(self): - '''Pop and return most recent operation from stack.''' - return self._stack.pop() - - def __len__(self): - '''Return count of operations.''' - return len(self._stack) - - def __iter__(self): - '''Return iterator over operations.''' - return iter(self._stack) - - -class Operation(object): - '''Represent an operation.''' - - -class CreateEntityOperation(Operation): - '''Represent create entity operation.''' - - def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - *entity_data* should be a mapping of the initial data to populate the - entity with when creating. - - .. note:: - - Shallow copies will be made of each value in *entity_data*. - - ''' - super(CreateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.entity_data = {} - for key, value in entity_data.items(): - self.entity_data[key] = copy.copy(value) - - -class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' - - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - *attribute_name* should be the string name of the attribute being - modified and *old_value* and *new_value* should reflect the change in - value. - - .. note:: - - Shallow copies will be made of both *old_value* and *new_value*. - - ''' - super(UpdateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.attribute_name = attribute_name - self.old_value = copy.copy(old_value) - self.new_value = copy.copy(new_value) - - -class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' - - def __init__(self, entity_type, entity_key): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - ''' - super(DeleteEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py deleted file mode 100644 index 2c7a9a45009..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py +++ /dev/null @@ -1,121 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import os -import uuid -import imp -import inspect - - -def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. - - Each discovered module should implement a register function that accepts - *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs - respectively. - - If a register function does not accept variable arguments, then attempt to - only pass accepted arguments to the function by inspecting its signature. - - ''' - logger = logging.getLogger(__name__ + '.discover') - - if positional_arguments is None: - positional_arguments = [] - - if keyword_arguments is None: - keyword_arguments = {} - - for path in paths: - # Ignore empty paths that could resolve to current directory. 
- path = path.strip() - if not path: - continue - - for base, directories, filenames in os.walk(path): - for filename in filenames: - name, extension = os.path.splitext(filename) - if extension != '.py': - continue - - module_path = os.path.join(base, filename) - unique_name = uuid.uuid4().hex - - try: - module = imp.load_source(unique_name, module_path) - except Exception as error: - logger.warning( - 'Failed to load plugin from "{0}": {1}' - .format(module_path, error) - ) - continue - - try: - module.register - except AttributeError: - logger.warning( - 'Failed to load plugin that did not define a ' - '"register" function at the module level: {0}' - .format(module_path) - ) - else: - # Attempt to only pass arguments that are accepted by the - # register function. - specification = inspect.getargspec(module.register) - - selected_positional_arguments = positional_arguments - selected_keyword_arguments = keyword_arguments - - if ( - not specification.varargs and - len(positional_arguments) > len(specification.args) - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' - ) - - selected_positional_arguments = positional_arguments[ - len(specification.args): - ] - selected_keyword_arguments = {} - - elif not specification.keywords: - # Remove arguments that have been passed as positionals. - remainder = specification.args[ - len(positional_arguments): - ] - - # Determine remaining available keyword arguments. - defined_keyword_arguments = [] - if specification.defaults: - defined_keyword_arguments = specification.args[ - -len(specification.defaults): - ] - - remaining_keyword_arguments = set([ - keyword_argument for keyword_argument - in defined_keyword_arguments - if keyword_argument in remainder - ]) - - if not set(keyword_arguments.keys()).issubset( - remaining_keyword_arguments - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' 
- ) - selected_keyword_arguments = { - key: value - for key, value in keyword_arguments.items() - if key in remaining_keyword_arguments - } - - module.register( - *selected_positional_arguments, - **selected_keyword_arguments - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py deleted file mode 100644 index ea101a29d4a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py +++ /dev/null @@ -1,202 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import re -import collections - -import ftrack_api.exception - - -class QueryResult(collections.Sequence): - '''Results from a query.''' - - OFFSET_EXPRESSION = re.compile('(?Poffset (?P\d+))') - LIMIT_EXPRESSION = re.compile('(?Plimit (?P\d+))') - - def __init__(self, session, expression, page_size=500): - '''Initialise result set. - - *session* should be an instance of :class:`ftrack_api.session.Session` - that will be used for executing the query *expression*. - - *page_size* should be an integer specifying the maximum number of - records to fetch in one request allowing the results to be fetched - incrementally in a transparent manner for optimal performance. Any - offset or limit specified in *expression* are honoured for final result - set, but intermediate queries may be issued with different offsets and - limits in order to fetch pages. When an embedded limit is smaller than - the given *page_size* it will be used instead and no paging will take - place. - - .. warning:: - - Setting *page_size* to a very large amount may negatively impact - performance of not only the caller, but the server in general. 
- - ''' - super(QueryResult, self).__init__() - self._session = session - self._results = [] - - ( - self._expression, - self._offset, - self._limit - ) = self._extract_offset_and_limit(expression) - - self._page_size = page_size - if self._limit is not None and self._limit < self._page_size: - # Optimise case where embedded limit is less than fetching a - # single page. - self._page_size = self._limit - - self._next_offset = self._offset - if self._next_offset is None: - # Initialise with zero offset. - self._next_offset = 0 - - def _extract_offset_and_limit(self, expression): - '''Process *expression* extracting offset and limit. - - Return (expression, offset, limit). - - ''' - offset = None - match = self.OFFSET_EXPRESSION.search(expression) - if match: - offset = int(match.group('value')) - expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] - ) - - limit = None - match = self.LIMIT_EXPRESSION.search(expression) - if match: - limit = int(match.group('value')) - expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] - ) - - return expression.strip(), offset, limit - - def __getitem__(self, index): - '''Return value at *index*.''' - while self._can_fetch_more() and index >= len(self._results): - self._fetch_more() - - return self._results[index] - - def __len__(self): - '''Return number of items.''' - while self._can_fetch_more(): - self._fetch_more() - - return len(self._results) - - def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' - return self._next_offset is not None - - def _fetch_more(self): - '''Fetch next page of results if available.''' - if not self._can_fetch_more(): - return - - expression = '{0} offset {1} limit {2}'.format( - self._expression, self._next_offset, self._page_size - ) - records, metadata = self._session._query(expression) - self._results.extend(records) - - if self._limit is not None and (len(self._results) >= self._limit): - # 
Original limit reached. - self._next_offset = None - del self._results[self._limit:] - else: - # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) - - def all(self): - '''Fetch and return all data.''' - return list(self) - - def one(self): - '''Return exactly one single result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - Raise :exc:`ValueError` if an existing offset is already present in the - expression as offset is inappropriate when expecting a single item. - - Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more - than one result was available or - :exc:`~ftrack_api.exception.NoResultFoundError` if no results were - available. - - .. note:: - - Both errors subclass - :exc:`~ftrack_api.exception.IncorrectResultError` if you want to - catch only one error type. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - if self._offset is not None: - raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' - ) - - # Apply custom limit as optimisation. A limit of 2 is used rather than - # 1 so that it is possible to test for multiple matching entries - # case. - expression += ' limit 2' - - results, metadata = self._session._query(expression) - - if not results: - raise ftrack_api.exception.NoResultFoundError() - - if len(results) != 1: - raise ftrack_api.exception.MultipleResultsFoundError() - - return results[0] - - def first(self): - '''Return first matching result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - If no matching result available return None. 
- - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - # Apply custom offset if present. - if self._offset is not None: - expression += ' offset {0}'.format(self._offset) - - # Apply custom limit as optimisation. - expression += ' limit 1' - - results, metadata = self._session._query(expression) - - if results: - return results[0] - - return None diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py deleted file mode 100644 index 1aab07ed77a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py deleted file mode 100644 index ee069b57b68..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py +++ /dev/null @@ -1,50 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. - - Provide ability to modify resource identifier before it is stored centrally - (:meth:`encode`), or after it has been retrieved, but before it is used - locally (:meth:`decode`). 
- - For example, you might want to decompose paths into a set of key, value - pairs to store centrally and then compose a path from those values when - reading back. - - .. note:: - - This is separate from any transformations an - :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted - towards common transformations. - - ''' - - def __init__(self, session): - '''Initialise resource identifier transformer. - - *session* should be the :class:`ftrack_api.session.Session` instance - to use for communication with the server. - - ''' - self.session = session - super(ResourceIdentifierTransformer, self).__init__() - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. 
- - ''' - return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py deleted file mode 100644 index 1a5da444324..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py +++ /dev/null @@ -1,2515 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import json -import logging -import collections -import datetime -import os -import getpass -import functools -import itertools -import distutils.version -import hashlib -import tempfile -import threading -import atexit -import warnings - -import requests -import requests.auth -import arrow -import clique - -import ftrack_api -import ftrack_api.exception -import ftrack_api.entity.factory -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.cache -import ftrack_api.symbol -import ftrack_api.query -import ftrack_api.attribute -import ftrack_api.collection -import ftrack_api.event.hub -import ftrack_api.event.base -import ftrack_api.plugin -import ftrack_api.inspection -import ftrack_api.operation -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.entity_id -import ftrack_api.accessor.server -import ftrack_api._centralized_storage_scenario -import ftrack_api.logging -from ftrack_api.logging import LazyLogMessage as L - -try: - from weakref import WeakMethod -except ImportError: - from ftrack_api._weakref import WeakMethod - - -class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' - - def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' - self.api_key = api_key - self.api_user = api_user - super(SessionAuthentication, self).__init__() - - def 
__call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) - return request - - -class Session(object): - '''An isolated session for interaction with an ftrack server.''' - - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. 
Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. 
- self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ftrack_api.event.hub.EventHub( - self._server_url, - self._api_user, - self._api_key, - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(WeakMethod(self.close)) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? 
- if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def __enter__(self): - '''Return session as context manager.''' - return self - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' - self.close() - - @property - def _request(self): - '''Return request session. - - Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has - been closed and connection unavailable. 
- - ''' - if self._managed_request is None: - raise ftrack_api.exception.ConnectionClosedError() - - return self._managed_request - - @_request.setter - def _request(self, value): - '''Set request session to *value*.''' - self._managed_request = value - - @property - def closed(self): - '''Return whether session has been closed.''' - return self._closed - - @property - def server_information(self): - '''Return server information such as server version.''' - return self._server_information.copy() - - @property - def server_url(self): - '''Return server ulr used for session.''' - return self._server_url - - @property - def api_user(self): - '''Return username used for session.''' - return self._api_user - - @property - def api_key(self): - '''Return API key used for session.''' - return self._api_key - - @property - def event_hub(self): - '''Return event hub.''' - return self._event_hub - - @property - def _local_cache(self): - '''Return top level memory cache.''' - return self.cache.caches[0] - - def check_server_compatibility(self): - '''Check compatibility with connected server.''' - server_version = self.server_information.get('version') - if server_version is None: - raise ftrack_api.exception.ServerCompatibilityError( - 'Could not determine server version.' - ) - - # Perform basic version check. - if server_version != 'dev': - min_server_version = '3.3.11' - if ( - distutils.version.LooseVersion(min_server_version) - > distutils.version.LooseVersion(server_version) - ): - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0} incompatible with this version of the ' - 'API which requires a server version >= {1}'.format( - server_version, - min_server_version - ) - ) - - def close(self): - '''Close session. - - Close connections to server. Clear any pending operations and local - cache. - - Use this to ensure that session is cleaned up properly after use. 
- - ''' - if self.closed: - self.logger.debug('Session already closed.') - return - - self._closed = True - - self.logger.debug('Closing session.') - if self.recorded_operations: - self.logger.warning( - 'Closing session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Close connections. - self._request.close() - self._request = None - - try: - self.event_hub.disconnect() - if self._auto_connect_event_hub_thread: - self._auto_connect_event_hub_thread.join() - except ftrack_api.exception.EventHubConnectionError: - pass - - self.logger.debug('Session closed.') - - def reset(self): - '''Reset session clearing local state. - - Clear all pending operations and expunge all entities from session. - - Also clear the local cache. If the cache used by the session is a - :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. - Otherwise, clear the entire cache. - - Plugins are not rediscovered or reinitialised, but certain plugin events - are re-emitted to properly configure session aspects that are dependant - on cache (such as location plugins). - - .. warning:: - - Previously attached entities are not reset in memory and will retain - their state, but should not be used. Doing so will cause errors. - - ''' - if self.recorded_operations: - self.logger.warning( - 'Resetting session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Re-configure certain session aspects that may be dependant on cache. 
- self._configure_locations() - - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. - - The current setting will be restored automatically when done. - - Example:: - - with session.auto_populating(False): - print entity['name'] - - ''' - return AutoPopulatingContext(self, auto_populate) - - def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. - - The current setting will be restored automatically when done. - - Example:: - - with session.operation_recording(False): - entity['name'] = 'change_not_recorded' - - ''' - return OperationRecordingContext(self, record_operations) - - @property - def created(self): - '''Return list of newly created entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.CREATED - ] - - @property - def modified(self): - '''Return list of locally modified entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.MODIFIED - ] - - @property - def deleted(self): - '''Return list of deleted entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.DELETED - ] - - def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. - - *reset_type* is a server side supported reset type, - passing the optional *entity* to perform the option upon. 
- - Please refer to ftrack documentation for a complete list of - supported server side reset types. - ''' - - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } - - if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) - - result = self.call( - [payload] - ) - - return result[0]['data'] - - def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. - - If specified, *data* should be a dictionary of key, value pairs that - should be used to populate attributes on the entity. - - If *reconstructing* is False then create a new entity setting - appropriate defaults for missing data. If True then reconstruct an - existing entity. - - Constructed entity will be automatically :meth:`merged ` - into the session. - - ''' - entity = self._create(entity_type, data, reconstructing=reconstructing) - entity = self.merge(entity) - return entity - - def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' - try: - EntityTypeClass = self.types[entity_type] - except KeyError: - raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) - - return EntityTypeClass(self, data=data, reconstructing=reconstructing) - - def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. - - *data* should be a dictionary of the same form passed to :meth:`create`. - - By default, check for an entity that has matching *data*. If - *identifying_keys* is specified as a list of keys then only consider the - values from *data* for those keys when searching for existing entity. If - *data* is missing an identifying key then raise :exc:`KeyError`. - - If no *identifying_keys* specified then use all of the keys from the - passed *data*. 
Raise :exc:`ValueError` if no *identifying_keys* can be - determined. - - Each key should be a string. - - .. note:: - - Currently only top level scalars supported. To ensure an entity by - looking at relationships, manually issue the :meth:`query` and - :meth:`create` calls. - - If more than one entity matches the determined filter criteria then - raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. - - If no matching entity found then create entity using supplied *data*. - - If a matching entity is found, then update it if necessary with *data*. - - .. note:: - - If entity created or updated then a :meth:`commit` will be issued - automatically. If this behaviour is undesired, perform the - :meth:`query` and :meth:`create` calls manually. - - Return retrieved or created entity. - - Example:: - - # First time, a new entity with `username=martin` is created. - entity = session.ensure('User', {'username': 'martin'}) - - # After that, the existing entity is retrieved. - entity = session.ensure('User', {'username': 'martin'}) - - # When existing entity retrieved, entity may also be updated to - # match supplied data. - entity = session.ensure( - 'User', {'username': 'martin', 'email': 'martin@example.com'} - ) - - ''' - if not identifying_keys: - identifying_keys = data.keys() - - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) - - if not identifying_keys: - raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. 
Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) - ) - - expression = '{0} where'.format(entity_type) - criteria = [] - for identifying_key in identifying_keys: - value = data[identifying_key] - - if isinstance(value, basestring): - value = '"{0}"'.format(value) - - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): - # Server does not store microsecond or timezone currently so - # need to strip from query. - # TODO: When datetime handling improved, update this logic. - value = ( - arrow.get(value).naive.replace(microsecond=0).isoformat() - ) - value = '"{0}"'.format(value) - - criteria.append('{0} is {1}'.format(identifying_key, value)) - - expression = '{0} {1}'.format( - expression, ' and '.join(criteria) - ) - - try: - entity = self.query(expression).one() - - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Creating entity as did not already exist.') - - # Create entity. - entity = self.create(entity_type, data) - self.commit() - - else: - self.logger.debug('Retrieved matching existing entity.') - - # Update entity if required. - updated = False - for key, target_value in data.items(): - if entity[key] != target_value: - entity[key] = target_value - updated = True - - if updated: - self.logger.debug('Updating existing entity to match new data.') - self.commit() - - return entity - - def delete(self, entity): - '''Mark *entity* for deletion.''' - if self.record_operations: - self.recorded_operations.push( - ftrack_api.operation.DeleteEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity) - ) - ) - - def get(self, entity_type, entity_key): - '''Return entity of *entity_type* with unique *entity_key*. - - First check for an existing entry in the configured cache, otherwise - issue a query to the server. - - If no matching entity found, return None. 
- - ''' - self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) - - primary_key_definition = self.types[entity_type].primary_key_attributes - if isinstance(entity_key, basestring): - entity_key = [entity_key] - - if len(entity_key) != len(primary_key_definition): - raise ValueError( - 'Incompatible entity_key {0!r} supplied. Entity type {1} ' - 'expects a primary key composed of {2} values ({3}).' - .format( - entity_key, entity_type, len(primary_key_definition), - ', '.join(primary_key_definition) - ) - ) - - entity = None - try: - entity = self._get(entity_type, entity_key) - - - except KeyError: - - # Query for matching entity. - self.logger.debug( - 'Entity not present in cache. Issuing new query.' - ) - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - expression = '{0} where ({1})'.format( - entity_type, ' and '.join(condition) - ) - - results = self.query(expression).all() - if results: - entity = results[0] - - return entity - - def _get(self, entity_type, entity_key): - '''Return cached entity of *entity_type* with unique *entity_key*. - - Raise :exc:`KeyError` if no such entity in the cache. - - ''' - # Check cache for existing entity emulating - # ftrack_api.inspection.identity result object to pass to key maker. - cache_key = self.cache_key_maker.key( - (str(entity_type), map(str, entity_key)) - ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) - entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) - - return entity - - def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. - - *expression* is not executed directly. Instead return an - :class:`ftrack_api.query.QueryResult` instance that will execute remote - call on access. 
- - *page_size* specifies the maximum page size that the returned query - result object should be configured with. - - .. seealso:: :ref:`querying` - - ''' - self.logger.debug(L('Query {0!r}', expression)) - - # Add in sensible projections if none specified. Note that this is - # done here rather than on the server to allow local modification of the - # schema setting to include commonly used custom attributes for example. - # TODO: Use a proper parser perhaps? - if not expression.startswith('select'): - entity_type = expression.split(' ', 1)[0] - EntityTypeClass = self.types[entity_type] - projections = EntityTypeClass.default_projections - - expression = 'select {0} from {1}'.format( - ', '.join(projections), - expression - ) - - query_result = ftrack_api.query.QueryResult( - self, expression, page_size=page_size - ) - return query_result - - def _query(self, expression): - '''Execute *query* and return (records, metadata). - - Records will be a list of entities retrieved via the query and metadata - a dictionary of accompanying information about the result set. - - ''' - # TODO: Actually support batching several queries together. - # TODO: Should batches have unique ids to match them up later. - batch = [{ - 'action': 'query', - 'expression': expression - }] - - # TODO: When should this execute? How to handle background=True? - results = self.call(batch) - - # Merge entities into local cache and return merged entities. - data = [] - merged = dict() - for entity in results[0]['data']: - data.append(self._merge_recursive(entity, merged)) - - return data, results[0]['metadata'] - - def merge(self, value, merged=None): - '''Merge *value* into session and return merged value. - - *merged* should be a mapping to record merges during run and should be - used to avoid infinite recursion. If not set will default to a - dictionary. 
- - ''' - if merged is None: - merged = {} - - with self.operation_recording(False): - return self._merge(value, merged) - - def _merge(self, value, merged): - '''Return merged *value*.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if isinstance(value, ftrack_api.entity.base.Entity): - log_debug and self.logger.debug( - 'Merging entity into session: {0} at {1}' - .format(value, id(value)) - ) - - return self._merge_entity(value, merged=merged) - - elif isinstance(value, ftrack_api.collection.Collection): - log_debug and self.logger.debug( - 'Merging collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): - log_debug and self.logger.debug( - 'Merging mapped collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value.collection: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - else: - return value - - def _merge_recursive(self, entity, merged=None): - '''Merge *entity* and all its attributes recursivly.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - attached = self.merge(entity, merged) - - for attribute in entity.attributes: - # Remote attributes. 
- remote_value = attribute.get_remote_value(entity) - - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - log_debug and self.logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - if isinstance(remote_value, ftrack_api.entity.base.Entity): - self._merge_recursive(remote_value, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.Collection - ): - for entry in remote_value: - self._merge_recursive(entry, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.MappedCollectionProxy - ): - for entry in remote_value.collection: - self._merge_recursive(entry, merged=merged) - - return attached - - def _merge_entity(self, entity, merged=None): - '''Merge *entity* into session returning merged entity. - - Merge is recursive so any references to other entities will also be - merged. - - *entity* will never be modified in place. Ensure that the returned - merged entity instance is used. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - with self.auto_populating(False): - entity_key = self.cache_key_maker.key( - ftrack_api.inspection.identity(entity) - ) - - # Check whether this entity has already been processed. - attached_entity = merged.get(entity_key) - if attached_entity is not None: - log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) - ) - - return attached_entity - else: - log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) - ) - - # Check for existing instance of entity in cache. 
- log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) - ) - try: - attached_entity = self.cache.get(entity_key) - - log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) - ) - - except KeyError: - # Construct new minimal instance to store in cache. - attached_entity = self._create( - entity.entity_type, {}, reconstructing=True - ) - - log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) - ) - - # Mark entity as seen to avoid infinite loops. - merged[entity_key] = attached_entity - - changes = attached_entity.merge(entity, merged=merged) - if changes: - self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') - - else: - self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' - ) - - return attached_entity - - def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. - - Any locally set values included in the *projections* will not be - overwritten with the retrieved remote value. If this 'synchronise' - behaviour is required, first clear the relevant values on the entity by - setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will - have the same effect:: - - >>> print(user['username']) - martin - >>> del user['username'] - >>> print(user['username']) - Symbol(NOT_SET) - - .. note:: - - Entities that have been created and not yet persisted will be - skipped as they have no remote values to fetch. 
- - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) - - if not isinstance( - entities, (list, tuple, ftrack_api.query.QueryResult) - ): - entities = [entities] - - # TODO: How to handle a mixed collection of different entity types - # Should probably fail, but need to consider handling hierarchies such - # as User and Group both deriving from Resource. Actually, could just - # proceed and ignore projections that are not present in entity type. - - entities_to_process = [] - - for entity in entities: - if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: - # Created entities that are not yet persisted have no remote - # values. Don't raise an error here as it is reasonable to - # iterate over an entities properties and see that some of them - # are NOT_SET. - self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) - continue - - entities_to_process.append(entity) - - if entities_to_process: - reference_entity = entities_to_process[0] - entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) - - primary_key_definition = reference_entity.primary_key_attributes - entity_keys = [ - ftrack_api.inspection.primary_key(entity).values() - for entity in entities_to_process - ] - - if len(primary_key_definition) > 1: - # Composite keys require full OR syntax unfortunately. 
- conditions = [] - for entity_key in entity_keys: - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - conditions.append('({0})'.format('and '.join(condition))) - - query = '{0} where {1}'.format(query, ' or '.join(conditions)) - - else: - primary_key = primary_key_definition[0] - - if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) - ) - else: - query = '{0} where {1} is {2}'.format( - query, primary_key, str(entity_keys[0][0]) - ) - - result = self.query(query) - - # Fetch all results now. Doing so will cause them to populate the - # relevant entities in the cache. - result.all() - - # TODO: Should we check that all requested attributes were - # actually populated? If some weren't would we mark that to avoid - # repeated calls or perhaps raise an error? - - # TODO: Make atomic. - def commit(self): - '''Commit all local changes to the server.''' - batch = [] - - with self.auto_populating(False): - for operation in self.recorded_operations: - - # Convert operation to payload. - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - # At present, data payload requires duplicating entity - # type in data and also ensuring primary key added. - entity_data = { - '__entity_type__': operation.entity_type, - } - entity_data.update(operation.entity_key) - entity_data.update(operation.entity_data) - - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ): - entity_data = { - # At present, data payload requires duplicating entity - # type. 
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value - } - - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values() - }) - - else: - raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) - ) - - batch.append(payload) - - # Optimise batch. - # TODO: Might be better to perform these on the operations list instead - # so all operation contextual information available. - - # If entity was created and deleted in one batch then remove all - # payloads for that entity. - created = set() - deleted = set() - - for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - created_then_deleted = deleted.intersection(created) - if created_then_deleted: - optimised_batch = [] - for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) - - if (entity_type, entity_key) in created_then_deleted: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Remove early update operations so that only last operation on - # attribute is applied server side. 
- updates_map = set() - for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in payload['entity_data'].items(): - if key == '__entity_type__': - continue - - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) - if identity in updates_map: - del payload['entity_data'][key] - else: - updates_map.add(identity) - - # Remove NOT_SET values from entity_data. - for payload in batch: - entity_data = payload.get('entity_data', {}) - for key, value in entity_data.items(): - if value is ftrack_api.symbol.NOT_SET: - del entity_data[key] - - # Remove payloads with redundant entity_data. - optimised_batch = [] - for payload in batch: - entity_data = payload.get('entity_data') - if entity_data is not None: - keys = entity_data.keys() - if not keys or keys == ['__entity_type__']: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Collapse updates that are consecutive into one payload. Also, collapse - # updates that occur immediately after creation into the create payload. - optimised_batch = [] - previous_payload = None - - for payload in batch: - if ( - previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] - ): - previous_payload['entity_data'].update(payload['entity_data']) - continue - - else: - optimised_batch.append(payload) - previous_payload = payload - - batch = optimised_batch - - # Process batch. - if batch: - result = self.call(batch) - - # Clear recorded operations. - self.recorded_operations.clear() - - # As optimisation, clear local values which are not primary keys to - # avoid redundant merges when merging references. Note: primary keys - # remain as needed for cache retrieval on new entities. 
- with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - for attribute in entity: - if attribute not in entity.primary_key_attributes: - del entity[attribute] - - # Process results merging into cache relevant data. - for entry in result: - - if entry['action'] in ('create', 'update'): - # Merge returned entities into local cache. - self.merge(entry['data']) - - elif entry['action'] == 'delete': - # TODO: Detach entity - need identity returned? - # TODO: Expunge entity from cache. - pass - # Clear remaining local state, including local values for primary - # keys on entities that were merged. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - entity.clear() - - def rollback(self): - '''Clear all recorded operations and local state. - - Typically this would be used following a failed :meth:`commit` in order - to revert the session to a known good state. - - Newly created entities not yet persisted will be detached from the - session / purged from cache and no longer contribute, but the actual - objects are not deleted from memory. They should no longer be used and - doing so could cause errors. - - ''' - with self.auto_populating(False): - with self.operation_recording(False): - - # Detach all newly created entities and remove from cache. This - # is done because simply clearing the local values of newly - # created entities would result in entities with no identity as - # primary key was local while not persisted. In addition, it - # makes no sense for failed created entities to exist in session - # or cache. 
- for operation in self.recorded_operations: - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - entity_key = str(( - str(operation.entity_type), - operation.entity_key.values() - )) - try: - self.cache.remove(entity_key) - except KeyError: - pass - - # Clear locally stored modifications on remaining entities. - for entity in self._local_cache.values(): - entity.clear() - - self.recorded_operations.clear() - - def _fetch_server_information(self): - '''Return server information.''' - result = self.call([{'action': 'query_server_information'}]) - return result[0] - - def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. - - Each discovered module should implement a register function that - accepts this session as first argument. Typically the function should - register appropriate event listeners against the session's event hub. - - def register(session): - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) - - *plugin_arguments* should be an optional mapping of keyword arguments - and values to pass to plugin register functions upon discovery. - - ''' - plugin_arguments = plugin_arguments or {} - ftrack_api.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) - - def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. - - *schema_cache_path* should be the path to the file containing the - schemas in JSON format. 
- - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) - - if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) - - return [], None - - with open(schema_cache_path, 'r') as schema_file: - schemas = json.load(schema_file) - hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True) - ).hexdigest() - - return schemas, hash_ - - def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. - - *schema_cache_path* should be a path to a file that the schemas can be - written to in JSON format. - - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) - - with open(schema_cache_path, 'w') as local_cache_file: - json.dump(schemas, local_cache_file, indent=4) - - def _load_schemas(self, schema_cache_path): - '''Load schemas. - - First try to load schemas from cache at *schema_cache_path*. If the - cache is not available or the cache appears outdated then load schemas - from server and store fresh copy in cache. - - If *schema_cache_path* is set to `False`, always load schemas from - server bypassing cache. - - ''' - local_schema_hash = None - schemas = [] - - if schema_cache_path: - try: - schemas, local_schema_hash = self._read_schemas_from_cache( - schema_cache_path - ) - except (IOError, TypeError, AttributeError, ValueError): - # Catch any known exceptions when trying to read the local - # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) - - # Use `dictionary.get` to retrieve hash to support older version of - # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) - if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' 
- 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self.call([{'action': 'query_schemas'}])[0] - - if schema_cache_path: - try: - self._write_schemas_to_cache(schemas, schema_cache_path) - except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) - - else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) - - return schemas - - def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' - fallback_factory = ftrack_api.entity.factory.StandardFactory() - classes = {} - - for schema in schemas: - results = self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ), - synchronous=True - ) - - results = [result for result in results if result is not None] - - if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) - entity_type_class = fallback_factory.create(schema) - - elif len(results) > 1: - raise ValueError( - 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' - .format(schema['id'], len(results)) - ) - - else: - entity_type_class = results[0] - - classes[entity_type_class.entity_type] = entity_type_class - - return classes - - def _configure_locations(self): - '''Configure locations.''' - # First configure builtin locations, by injecting them into local cache. - - # Origin. 
- location = self.create( - 'Location', - data=dict( - name='ftrack.origin', - id=ftrack_api.symbol.ORIGIN_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.OriginLocationMixin, - name='OriginLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 100 - - # Unmanaged. - location = self.create( - 'Location', - data=dict( - name='ftrack.unmanaged', - id=ftrack_api.symbol.UNMANAGED_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - # location.resource_identifier_transformer = ( - # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) - # ) - location.priority = 90 - - # Review. - location = self.create( - 'Location', - data=dict( - name='ftrack.review', - id=ftrack_api.symbol.REVIEW_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 110 - - # Server. - location = self.create( - 'Location', - data=dict( - name='ftrack.server', - id=ftrack_api.symbol.SERVER_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.ServerLocationMixin, - name='ServerLocation' - ) - location.accessor = ftrack_api.accessor.server._ServerAccessor( - session=self - ) - location.structure = ftrack_api.structure.entity_id.EntityIdStructure() - location.priority = 150 - - # Master location based on server scenario. 
- storage_scenario = self.server_information.get('storage_scenario') - - if ( - storage_scenario and - storage_scenario.get('scenario') - ): - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.activate', - data=dict( - storage_scenario=storage_scenario - ) - ), - synchronous=True - ) - - # Next, allow further configuration of locations via events. - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ), - synchronous=True - ) - - @ftrack_api.logging.deprecation_warning( - 'Session._call is now available as public method Session.call. The ' - 'private method will be removed in version 2.0.' - ) - def _call(self, data): - '''Make request to server with *data* batch describing the actions. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.call(data) - - def call(self, data): - '''Make request to server with *data* batch describing the actions.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') - - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) - - response = self._request.post( - url, - headers=headers, - data=data - ) - - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - - self.logger.debug(L('Response: {0!r}', response.text)) - try: - result = self.decode(response.text) - - except Exception: - error_message = ( - 'Server reported error in unexpected format. Raw error was: {0}' - .format(response.text) - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - else: - if 'exception' in result: - # Handle exceptions. 
- error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - return result - - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. The following strategies are available: - - * *all* - Encode all attributes, loading any that are currently NOT_SET. - * *set_only* - Encode only attributes that are currently set without - loading any from the remote. - * *modified_only* - Encode only attributes that have been modified - locally. - * *persisted_only* - Encode only remote (persisted) attribute values. - - ''' - entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' - ) - if entity_attribute_strategy not in entity_attribute_strategies: - raise ValueError( - 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) - ) - ) - - return json.dumps( - data, - sort_keys=True, - default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) - ) - - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. See :meth:`Session.encode` for available strategies. 
- - ''' - if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): - return { - '__type__': 'datetime', - 'value': item.isoformat() - } - - if isinstance(item, OperationPayload): - data = dict(item.items()) - if "entity_data" in data: - for key, value in data["entity_data"].items(): - if isinstance(value, ftrack_api.entity.base.Entity): - data["entity_data"][key] = self.entity_reference(value) - - return data - - if isinstance(item, ftrack_api.entity.base.Entity): - data = self.entity_reference(item) - - with self.auto_populating(True): - - for attribute in item.attributes: - value = ftrack_api.symbol.NOT_SET - - if entity_attribute_strategy == 'all': - value = attribute.get_value(item) - - elif entity_attribute_strategy == 'set_only': - if attribute.is_set(item): - value = attribute.get_local_value(item) - if value is ftrack_api.symbol.NOT_SET: - value = attribute.get_remote_value(item) - - elif entity_attribute_strategy == 'modified_only': - if attribute.is_modified(item): - value = attribute.get_local_value(item) - - elif entity_attribute_strategy == 'persisted_only': - if not attribute.computed: - value = attribute.get_remote_value(item) - - if value is not ftrack_api.symbol.NOT_SET: - if isinstance( - attribute, ftrack_api.attribute.ReferenceAttribute - ): - if isinstance(value, ftrack_api.entity.base.Entity): - value = self.entity_reference(value) - - data[attribute.name] = value - - return data - - if isinstance( - item, ftrack_api.collection.MappedCollectionProxy - ): - # Use proxied collection for serialisation. - item = item.collection - - if isinstance(item, ftrack_api.collection.Collection): - data = [] - for entity in item: - data.append(self.entity_reference(entity)) - - return data - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. 
- - Return a mapping containing the __entity_type__ of the entity along with - the key, value pairs that make up it's primary key. - - ''' - reference = { - '__entity_type__': entity.entity_type - } - with self.auto_populating(False): - reference.update(ftrack_api.inspection.primary_key(entity)) - - return reference - - @ftrack_api.logging.deprecation_warning( - 'Session._entity_reference is now available as public method ' - 'Session.entity_reference. The private method will be removed ' - 'in version 2.0.' - ) - def _entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. - - Return a mapping containing the __entity_type__ of the entity along - with the key, value pairs that make up it's primary key. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.entity_reference(entity) - - def decode(self, string): - '''Return decoded JSON *string* as Python object.''' - with self.operation_recording(False): - return json.loads(string, object_hook=self._decode) - - def _decode(self, item): - '''Return *item* transformed into appropriate representation.''' - if isinstance(item, collections.Mapping): - if '__type__' in item: - if item['__type__'] == 'datetime': - item = arrow.get(item['value']) - - elif '__entity_type__' in item: - item = self._create( - item['__entity_type__'], item, reconstructing=True - ) - - return item - - def _get_locations(self, filter_inaccessible=True): - '''Helper to returns locations ordered by priority. - - If *filter_inaccessible* is True then only accessible locations will be - included in result. - - ''' - # Optimise this call. - locations = self.query('Location') - - # Filter. - if filter_inaccessible: - locations = filter( - lambda location: location.accessor, - locations - ) - - # Sort by priority. 
- locations = sorted( - locations, key=lambda location: location.priority - ) - - return locations - - def pick_location(self, component=None): - '''Return suitable location to use. - - If no *component* specified then return highest priority accessible - location. Otherwise, return highest priority accessible location that - *component* is available in. - - Return None if no suitable location could be picked. - - ''' - if component: - return self.pick_locations([component])[0] - - else: - locations = self._get_locations() - if locations: - return locations[0] - else: - return None - - def pick_locations(self, components): - '''Return suitable locations for *components*. - - Return list of locations corresponding to *components* where each - picked location is the highest priority accessible location for that - component. If a component has no location available then its - corresponding entry will be None. - - ''' - candidate_locations = self._get_locations() - availabilities = self.get_component_availabilities( - components, locations=candidate_locations - ) - - locations = [] - for component, availability in zip(components, availabilities): - location = None - - for candidate_location in candidate_locations: - if availability.get(candidate_location['id']) > 0.0: - location = candidate_location - break - - locations.append(location) - - return locations - - def create_component( - self, path, data=None, location='auto' - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. 
For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). - - If *location* is specified then automatically add component to that - location. The default of 'auto' will automatically pick a suitable - location to add the component to if one is available. To not add to any - location specifiy locations as None. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration in the - location. - ''' - if data is None: - data = {} - - if location == 'auto': - # Check if the component name matches one of the ftrackreview - # specific names. Add the component to the ftrack.review location if - # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' - ): - location = self.get( - 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID - ) - - else: - location = self.pick_location() - - try: - collection = clique.parse(path) - - except ValueError: - # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) - - data.setdefault('file_type', os.path.splitext(path)[-1]) - - return self._create_component( - 'FileComponent', path, data, location - ) - - else: - # Calculate size of container and members. 
- member_sizes = {} - container_size = data.get('size') - - if container_size is not None: - if len(collection.indexes) > 0: - member_size = int( - round(container_size / len(collection.indexes)) - ) - for item in collection: - member_sizes[item] = member_size - - else: - container_size = 0 - for item in collection: - member_sizes[item] = self._get_filesystem_size(item) - container_size += member_sizes[item] - - # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - data.setdefault('padding', collection.padding) - data.setdefault('file_type', os.path.splitext(container_path)[-1]) - data.setdefault('size', container_size) - - container = self._create_component( - 'SequenceComponent', container_path, data, location=None - ) - - # Create member components for sequence. - for member_path in collection: - member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': os.path.splitext(member_path)[-1] - } - - component = self._create_component( - 'FileComponent', member_path, member_data, location=None - ) - container['members'].append(component) - - if location: - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True - ) - - return container - - def _create_component(self, entity_type, path, data, location): - '''Create and return component. - - See public function :py:func:`createComponent` for argument details. - - ''' - component = self.create(entity_type, data) - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component(component, path, recursive=False) - - if location: - location.add_component(component, origin_location, recursive=False) - - return component - - def _get_filesystem_size(self, path): - '''Return size from *path*''' - try: - size = os.path.getsize(path) - except OSError: - size = 0 - - return size - - def get_component_availability(self, component, locations=None): - '''Return availability of *component*. - - If *locations* is set then limit result to availability of *component* - in those *locations*. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] - - def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. - - If *locations* is set then limit result to availabilities of - *components* in those *locations*. - - Return a list of dictionaries of {location_id:percentage_availability}. - The list indexes correspond to those of *components*. - - ''' - availabilities = [] - - if locations is None: - locations = self.query('Location') - - # Separate components into two lists, those that are containers and - # those that are not, so that queries can be optimised. - standard_components = [] - container_components = [] - - for component in components: - if 'members' in component.keys(): - container_components.append(component) - else: - standard_components.append(component) - - # Perform queries. 
- if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) - - if container_components: - self.populate( - container_components, - 'members, component_locations.location_id' - ) - - base_availability = {} - for location in locations: - base_availability[location['id']] = 0.0 - - for component in components: - availability = base_availability.copy() - availabilities.append(availability) - - is_container = 'members' in component.keys() - if is_container and len(component['members']): - member_availabilities = self.get_component_availabilities( - component['members'], locations=locations - ) - multiplier = 1.0 / len(component['members']) - for member, member_availability in zip( - component['members'], member_availabilities - ): - for location_id, ratio in member_availability.items(): - availability[location_id] += ( - ratio * multiplier - ) - else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] - if location_id in availability: - availability[location_id] = 100.0 - - for location_id, percentage in availability.items(): - # Avoid quantization error by rounding percentage and clamping - # to range 0-100. - adjusted_percentage = round(percentage, 9) - adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) - availability[location_id] = adjusted_percentage - - return availabilities - - @ftrack_api.logging.deprecation_warning( - 'Session.delayed_job has been deprecated in favour of session.call. ' - 'Please refer to the release notes for more information.' - ) - def delayed_job(self, job_type): - '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. - - *job_type* should be one of the allowed job types. There is currently - only one remote job type "SYNC_USERS_LDAP". 
- ''' - if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): - raise ValueError( - u'Invalid Job type: {0}.'.format(job_type) - ) - - operation = { - 'action': 'delayed_job', - 'job_type': job_type.name - } - - try: - result = self.call( - [operation] - )[0] - - except ftrack_api.exception.ServerError as error: - raise - - return result['data'] - - def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. - - The returned URL will be authenticated using a token which will expire - after 6 minutes. - - *name* should be the name of the widget to return and should be one of - 'info', 'tasks' or 'tasks_browser'. - - Certain widgets require an entity to be specified. If so, specify it by - setting *entity* to a valid entity instance. - - *theme* sets the theme of the widget and can be either 'light' or 'dark' - (defaulting to 'dark' if an invalid option given). - - ''' - operation = { - 'action': 'get_widget_url', - 'name': name, - 'theme': theme - } - if entity: - operation['entity_type'] = entity.entity_type - operation['entity_key'] = ( - ftrack_api.inspection.primary_key(entity).values() - ) - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'get_widget_url\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "get_widget_url", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - else: - return result[0]['widget_url'] - - def encode_media(self, media, version_id=None, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible that can be used - as a thumbnail. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *version_id* is specified, the new components will automatically be - associated with the AssetVersion. Otherwise, the components will not - be associated to a version even if the supplied *media* belongs to one. - A server version of 3.3.32 or higher is required for the version_id - argument to function properly. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. - ''' - if isinstance(media, basestring): - # Media is a path to a file. - server_location = self.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - if keep_original == 'auto': - keep_original = False - - component_data = None - if keep_original: - component_data = dict(version_id=version_id) - - component = self.create_component( - path=media, - data=component_data, - location=server_location - ) - - # Auto commit to ensure component exists when sent to server. - self.commit() - - elif ( - hasattr(media, 'entity_type') and - media.entity_type in ('FileComponent',) - ): - # Existing file component. 
- component = media - if keep_original == 'auto': - keep_original = True - - else: - raise ValueError( - 'Unable to encode media of type: {0}'.format(type(media)) - ) - - operation = { - 'action': 'encode_media', - 'component_id': component['id'], - 'version_id': version_id, - 'keep_original': keep_original - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'encode_media\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "encode_media", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return self.get('Job', result[0]['job_id']) - - def get_upload_metadata( - self, component_id, file_name, file_size, checksum=None - ): - '''Return URL and headers used to upload data for *component_id*. - - *file_name* and *file_size* should match the components details. - - The returned URL should be requested using HTTP PUT with the specified - headers. - - The *checksum* is used as the Content-MD5 header and should contain - the base64-encoded 128-bit MD5 digest of the message (without the - headers) according to RFC 1864. This can be used as a message integrity - check to verify that the data is the same data that was originally sent. - ''' - operation = { - 'action': 'get_upload_metadata', - 'component_id': component_id, - 'file_name': file_name, - 'file_size': file_size, - 'checksum': checksum - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. 
- if 'Invalid action u\'get_upload_metadata\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"get_upload_metadata", please update server and try ' - 'again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return result[0] - - def send_user_invite(self, user): - '''Send a invitation to the provided *user*. - - *user* is a User instance - - ''' - - self.send_user_invites( - [user] - ) - - def send_user_invites(self, users): - '''Send a invitation to the provided *user*. - - *users* is a list of User instances - - ''' - - operations = [] - - for user in users: - operations.append( - { - 'action':'send_user_invite', - 'user_id': user['id'] - } - ) - - try: - self.call(operations) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_user_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_user_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - def send_review_session_invite(self, invitee): - '''Send an invite to a review session to *invitee*. - - *invitee* is a instance of ReviewSessionInvitee. - - .. note:: - - The *invitee* must be committed. - - ''' - self.send_review_session_invites([invitee]) - - def send_review_session_invites(self, invitees): - '''Send an invite to a review session to a list of *invitees*. - - *invitee* is a list of ReviewSessionInvitee objects. - - .. note:: - - All *invitees* must be committed. 
- - ''' - operations = [] - - for invitee in invitees: - operations.append( - { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] - } - ) - - try: - self.call(operations) - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_review_session_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - -class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' - - def __init__(self, session, auto_populate): - '''Initialise context.''' - super(AutoPopulatingContext, self).__init__() - self._session = session - self._auto_populate = auto_populate - self._current_auto_populate = None - - def __enter__(self): - '''Enter context switching to desired auto populate setting.''' - self._current_auto_populate = self._session.auto_populate - self._session.auto_populate = self._auto_populate - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' - self._session.auto_populate = self._current_auto_populate - - -class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' - - def __init__(self, session, record_operations): - '''Initialise context.''' - super(OperationRecordingContext, self).__init__() - self._session = session - self._record_operations = record_operations - self._current_record_operations = None - - def __enter__(self): - '''Enter context.''' - self._current_record_operations = self._session.record_operations - self._session.record_operations = self._record_operations - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit 
context.''' - self._session.record_operations = self._current_record_operations - - -class OperationPayload(collections.MutableMapping): - '''Represent operation payload.''' - - def __init__(self, *args, **kwargs): - '''Initialise payload.''' - super(OperationPayload, self).__init__() - self._data = dict() - self.update(dict(*args, **kwargs)) - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py deleted file mode 100644 index 1aab07ed77a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py deleted file mode 100644 index eae3784dc2e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from abc import ABCMeta, abstractmethod - - -class Structure(object): - '''Structure plugin 
interface. - - A structure plugin should compute appropriate paths for data. - - ''' - - __metaclass__ = ABCMeta - - def __init__(self, prefix=''): - '''Initialise structure.''' - self.prefix = prefix - self.path_separator = '/' - super(Structure, self).__init__() - - @abstractmethod - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - - def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] - if padding: - expression = '%0{0}d'.format(padding) - else: - expression = '%d' - - return expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py deleted file mode 100644 index ae466bf6d9f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py +++ /dev/null @@ -1,12 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.structure.base - - -class EntityIdStructure(ftrack_api.structure.base.Structure): - '''Entity id pass-through structure.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py deleted file mode 100644 index acc3e21b026..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright 
(c) 2014 ftrack - -import os - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class IdStructure(ftrack_api.structure.base.Structure): - '''Id based structure supporting Components only. - - A components unique id will be used to form a path to store the data at. - To avoid millions of entries in one directory each id is chunked into four - prefix directories with the remainder used to name the file:: - - /prefix/1/2/3/4/56789 - - If the component has a defined filetype it will be added to the path:: - - /prefix/1/2/3/4/56789.exr - - Components that are children of container components will be placed inside - the id structure of their parent:: - - /prefix/1/2/3/4/56789/355827648d.exr - /prefix/1/2/3/4/56789/ajf24215b5.exr - - However, sequence children will be named using their label as an index and - a common prefix of 'file.':: - - /prefix/1/2/3/4/56789/file.0001.exr - /prefix/1/2/3/4/56789/file.0002.exr - - ''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - if entity.entity_type in ('FileComponent',): - # When in a container, place the file inside a directory named - # after the container. - container = entity['container'] - if container and container is not ftrack_api.symbol.NOT_SET: - path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Label doubles as index for now. - name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) - parts = [os.path.dirname(path), name] - - else: - # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] - parts = [path, name] - - else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) - - elif entity.entity_type in ('SequenceComponent',): - name = 'file' - - # Add a sequence identifier. 
- sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) - - if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api.symbol.NOT_SET - ): - name += entity['file_type'] - - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) - - elif entity.entity_type in ('ContainerComponent',): - # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) - - else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) - - return self.path_separator.join(parts).strip('/') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py deleted file mode 100644 index 0d4d3a57f57..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from .base import Structure - - -class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* should be a mapping that includes at least a - 'source_resource_identifier' key that refers to the resource identifier - to pass through. - - ''' - if context is None: - context = {} - - resource_identifier = context.get('source_resource_identifier') - if resource_identifier is None: - raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' 
- ) - - return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py deleted file mode 100644 index 0b0602df003..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py +++ /dev/null @@ -1,217 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import re -import unicodedata - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class StandardStructure(ftrack_api.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. - - The resource identifier is generated from the project code, the name - of objects in the project structure, asset name and version number:: - - my_project/folder_a/folder_b/asset_name/v003 - - If the component is a `FileComponent` then the name of the component and the - file type are used as filename in the resource_identifier:: - - my_project/folder_a/folder_b/asset_name/v003/foo.jpg - - If the component is a `SequenceComponent` then a sequence expression, - `%04d`, is used. E.g. a component with the name `foo` yields:: - - my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg - - For the member components their index in the sequence is used:: - - my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg - - The name of the component is added to the resource identifier if the - component is a `ContainerComponent`. E.g. 
a container component with the - name `bar` yields:: - - my_project/folder_a/folder_b/asset_name/v003/bar - - For a member of that container the file name is based on the component name - and file type:: - - my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - - ''' - - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. - - If *project_versions_prefix* is defined, insert after the project code - for versions published directly under the project:: - - my_project//v001/foo.jpg - - Replace illegal characters with *illegal_character_substitute* if - defined. - - .. note:: - - Nested component containers/sequences are not supported. - - ''' - super(StandardStructure, self).__init__() - self.project_versions_prefix = project_versions_prefix - self.illegal_character_substitute = illegal_character_substitute - - def _get_parts(self, entity): - '''Return resource identifier parts from *entity*.''' - session = entity.session - - version = entity['version'] - - if version is ftrack_api.symbol.NOT_SET and entity['version_id']: - version = session.get('AssetVersion', entity['version_id']) - - error_message = ( - 'Component {0!r} must be attached to a committed ' - 'version and a committed asset with a parent context.'.format( - entity - ) - ) - - if ( - version is ftrack_api.symbol.NOT_SET or - version in session.created - ): - raise ftrack_api.exception.StructureError(error_message) - - link = version['link'] - - if not link: - raise ftrack_api.exception.StructureError(error_message) - - structure_names = [ - item['name'] - for item in link[1:-1] - ] - - project_id = link[0]['id'] - project = session.get('Project', project_id) - asset = version['asset'] - - version_number = self._format_version(version['version']) - - parts = [] - parts.append(project['name']) - - if structure_names: - parts.extend(structure_names) - elif self.project_versions_prefix: - # Add *project_versions_prefix* if configured and the 
version is - # published directly under the project. - parts.append(self.project_versions_prefix) - - parts.append(asset['name']) - parts.append(version_number) - - return [self.sanitise_for_filesystem(part) for part in parts] - - def _format_version(self, number): - '''Return a formatted string representing version *number*.''' - return 'v{0:03d}'.format(number) - - def sanitise_for_filesystem(self, value): - '''Return *value* with illegal filesystem characters replaced. - - An illegal character is one that is not typically valid for filesystem - usage, such as non ascii characters, or can be awkward to use in a - filesystem, such as spaces. Replace these characters with - the character specified by *illegal_character_substitute* on - initialisation. If no character was specified as substitute then return - *value* unmodified. - - ''' - if self.illegal_character_substitute is None: - return value - - if isinstance(value, str): - value = value.decode('utf-8') - - value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') - value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) - return unicode(value.strip().lower()) - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information, but - is unused in this implementation. - - - Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not - attached to a committed version and a committed asset with a parent - context. - - ''' - if entity.entity_type in ('FileComponent',): - container = entity['container'] - - if container: - # Get resource identifier for container. - container_path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Strip the sequence component expression from the parent - # container and back the correct filename, i.e. - # /sequence/component/sequence_component_name.0012.exr. 
- name = '{0}.{1}{2}'.format( - container['name'], entity['name'], entity['file_type'] - ) - parts = [ - os.path.dirname(container_path), - self.sanitise_for_filesystem(name) - ] - - else: - # Container is not a sequence component so add it as a - # normal component inside the container. - name = entity['name'] + entity['file_type'] - parts = [ - container_path, self.sanitise_for_filesystem(name) - ] - - else: - # File component does not have a container, construct name from - # component name and file type. - parts = self._get_parts(entity) - name = entity['name'] + entity['file_type'] - parts.append(self.sanitise_for_filesystem(name)) - - elif entity.entity_type in ('SequenceComponent',): - # Create sequence expression for the sequence component and add it - # to the parts. - parts = self._get_parts(entity) - sequence_expression = self._get_sequence_expression(entity) - parts.append( - '{0}.{1}{2}'.format( - self.sanitise_for_filesystem(entity['name']), - sequence_expression, - self.sanitise_for_filesystem(entity['file_type']) - ) - ) - - elif entity.entity_type in ('ContainerComponent',): - # Add the name of the container to the resource identifier parts. 
- parts = self._get_parts(entity) - parts.append(self.sanitise_for_filesystem(entity['name'])) - - else: - raise NotImplementedError( - 'Cannot generate resource identifier for unsupported ' - 'entity {0!r}'.format(entity) - ) - - return self.path_separator.join(parts) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py deleted file mode 100644 index f46760f634b..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py +++ /dev/null @@ -1,77 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - - -class Symbol(object): - '''A constant symbol.''' - - def __init__(self, name, value=True): - '''Initialise symbol with unique *name* and *value*. - - *value* is used for nonzero testing. - - ''' - self.name = name - self.value = value - - def __str__(self): - '''Return string representation.''' - return self.name - - def __repr__(self): - '''Return representation.''' - return '{0}({1})'.format(self.__class__.__name__, self.name) - - def __nonzero__(self): - '''Return whether symbol represents non-zero value.''' - return bool(self.value) - - def __copy__(self): - '''Return shallow copy. - - Overridden to always return same instance. - - ''' - return self - - -#: Symbol representing that no value has been set or loaded. -NOT_SET = Symbol('NOT_SET', False) - -#: Symbol representing created state. -CREATED = Symbol('CREATED') - -#: Symbol representing modified state. -MODIFIED = Symbol('MODIFIED') - -#: Symbol representing deleted state. -DELETED = Symbol('DELETED') - -#: Topic published when component added to a location. -COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' - -#: Topic published when component removed from a location. 
-COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' - -#: Identifier of builtin origin location. -ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' - -#: Identifier of builtin unmanaged location. -UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' - -#: Identifier of builtin review location. -REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' - -#: Identifier of builtin connect location. -CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' - -#: Identifier of builtin server location. -SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' - -#: Chunk size used when working with data, default to 1Mb. -CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 - -#: Symbol representing syncing users with ldap -JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov deleted file mode 100644 index db34709c2426d85147e9512b4de3c66c7dd48a00..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 17627 zcmchf2S5|e_UMz)dsR9F2*nC0QWObIB?ux`L?JX01O&wbQW81{NV6ayAVn--P{9fW z=_m?Vz)C_>G$I6ugpg$31pV&!{qMW?zWcxL|K6BocV}nM%+8$o%{eD)5C{a??|4M$ z(c@?|T7VD1+avyKI_`Ju;6a!b6nxU(mv^aq{j4ExLm*JDi$#$L1pNG&{>ur>{=0Ll zKTH0jBVK9YQvtrPPAgQfuh(&SE-r`=B9b~02tI@r5uPs*6p-Odp569bbnC|}=hnBp zE>O@wzzYH%IuFoj3INHaL_{2+abfcJN1*_)r+5nZ^kY^{_xr}!b+g^8 zBASH_@`eQ+`1Vobb@{MGgoVc=Lz1XhZ}nQ~kGx*}J2)yNsFUkCpKnw1{G8=zT zdVlP8l6s)BZq>J%tks_TQZ{K-7pWo)2?ZlRKMb2?GPe&0r*z+8<|`PVo-#Y4QmAx` zsV*_$Jh86zsK!)vtMUmWAx>aw7G~DC6hD=pniUfVn1k9WLrl%idm1K*AFMNqxAY5o zrF8132yXTnneKW=(3qOKzO`OYm5j39koUB6E9r%NM7xOKENxg+wCl^z%mr^D3ijJ0 zZdtrQqI&tt>wd(6icrH6Pfw-Rd$t8wl)995-e3DzdxF{dIL3zHw}M&kn^@U zAl$iaIFFg*^9Yk2ld5wXlU*^3(-t-_#j(0XUvHD&u5EkfTOF{zqAp|Dpt?JAZwl4U 
ze@KR=NkjBEF*0pN;`j6QV{dRyxOG%TEnBt9yyfdy=+Ba8o?iY-U8;-tPfA%L4=TT- zO0M^u838vI1S*>z(erckuIXIUsi=D{A#-;|IXx+ndm8=7?a%c4=}F*+b2I9Jd_-vC zxx#iK``t&Q&JVVP3U%ZS70?6oU3Ksx>+6Li^*11yn)Ha+vRYUX$|8{EXrGRzv9ka zTxNuMSN%V`KIzn954Y;59;}cp-0aQn&nrBUl>ZA>X&Xkmtv>a`s-w}a` zG2p;P%M4m6>Z{6pv-}iv;*;#oBGdS@Vm@fM^m<3!u1lzuTefI1vR^n2<5qj@IH+Tn zQ@3pS4sV=Cs_x5NS*J=JH%x(@tk`n7u~gj+!}i>SJNIYs)mAPlJI%k$m_H$oE>-@N zh^=W8w#q_oQVQSOJeE-|9R2)uRMJZdoBPDS-sDq0psrbXF!4?IkI4i(o}F@L(`qZC zHF6?l^9O|T=g>u zln9rHSn#$!rG{vO40ZrCN^;1uu*UngaTWSO`VYsFI7Fy2nPezMt)8cK8_6abGj#Ys(SQgLfv0 z=09u-fMgrfOwG!WDDwj8^Qjy4R3$sQetY-bl}gG#)&d}wfG6A4&P6s1PM%sEX#i>! zM2R}b3tWDB`)(vwm&j1HT@=UXkef=S_T|-Au=5PAyeRo@AJ#KYc;gjnMcl4wWBXb1 zO}wO_zFjgK$Dj5HlN*zYfukwUI<|>*Of#nN{OoA(B`ut;F>ZMu<~=4GJDvk+Y2e_9 zvrNUHA9ZD8QbJF)=CaGnF-1Fam#xO+44clE=)KT4^R{fe*&?G^FW-zhwlsvflUWs- z@%RLZv+Spshm2w5*@Rj$RW9B3m`CLEr7LLrVRGZqkCSol_OKPv-rtcWU+A~}<5Dlp z&w|oXs&RpclUPU8?9`mW`qZoZ+SWf>+tbdr8)2^RUbY-_eipZ4JjZUP59849aAy}z zG4|=XyqsVNwf8%M3Uusb1U_Kcfu zAA=3DOwh)U&G~na`ZcU&C81q~o1ThugN?pKRY+PWzjiYE(R^;DxBW`o{EB63a%Jrw z+vTi&nNxvrzN6!tI@INMF5^xvq`{A_U_gvrUCOLwr{}uwXdCi zL5uKyu)SYZ#Lr{JnG=S>voCwHgEok^pdC<4{R<9@N)b_PGPk*%)a3qRu_CKm} z5x7=#rU@tW%ov8RNxgNA0&V%DAy}wzB4J#kF;{Q@Vo}4u>Tth$StXlzHrR zsNUA|i*Dp=?O%Y!PXu3)$&nwAyU8eyTkG9meX(wufa;K3Qq9_~LMlEp9_A^p9^Y}> zXteo-UCs)KBZ6HHH&rKE)`11t?*Lm`i<2(?+9lVXs-NAFyQgM&Bj0|01kN=|Fe zNG)6)wx%$1Hl57NwkR9ZdFt3&?E>`#Q+&LVWvi-XaNPx;wKSw&5_$zzzDnU)=l^DS9HLlx$k-O`lRvN zp<~38#@LY&hI?RjAPHiL`)I(8)!}0cK!0Oh|AX)TFzYm}I&j1afSHP0jkn+S0x;DO zSRMY#Qe&C$t5HjVsjD~YZk2L;kQsacG&8CLA*_=0kCqUIlG9f;LOLaLjTDE(+~=ZB z8iOHv^M@akY8g^3U?`B6BgJN>2rhm6gZbS*Mgxe2JRZ4SLSOG1X3Vzp67lo!^ZU zDmUD*xEy(;Vh)@HTsgGWiNGe|3*$}c^6Z5j9V?L@*Aib>zLjojxo|iwTrNanwV48t zHW1*W;a^K$lfHsUHQQj9W6*~=qfrXit{j8$oK4tmb_%3=tieM8QPS?$h(i@?J8J}I zbZT7lj=xt7x3Y*m`iL9R=w%_N8e8lhEsG0>+Wt9Jha-y%%S^?P`!Hk;BW?NhS1|!b zUsHW%TW`;{o-O%`1K@4wZW;8!eQU#{s||$}Wpy^GJ`_m%uum}YVCb}$+V5E^#XKVK zxN}`s#%ZW(sUz~8IFavKu69}B4<=+CW^tD1&cC-CFID0{aH=V^@Zd8~gLIm{2=S;+ 
zypIl_93^CuBWb0)K*dBGdYsVSqIxjWzwAkDRwzP$U*6p}?Cgnznh#f>?3j-8zr|Hu zx=Sqnn}wdznkNe1-xr{*(uU=ZS^Rv-ZTiGId)r4cREc^|lcsobPjL0#jaEMCni3L@ z-&X9OeQtjLRiurVmnvz9=a(_Xyc0&p6AC zx~53l$CM)PqQn9T!`x#|SM!(n-DG}jtzby)v;xFrQVanh{Ittl(R*)Cga3yR!IiJy ztYsGYaGFROBG`97^zFSi3bR-33uQ7}cmsHX+K8;YH?(q{*pQlSKmC4#1kt4a^ z>ZTpyaNLuL1BcDTHyO7aVxL{!&G(M#TRed1=G?H&~*Ix>v$g z_Jf}1=7^0y28s7+l(o}R*o{3J>y}GyAAegj$yqBw`L5r$>Lt1RKywe961{{)cq+a{ zk3kiZsA;eG8O~?Q;?#^yTFJZ!KT+bAxb9)^9Yf;X$S5fn>DOO~?fQE_Fe;kS7ISw! z5}TqqLwRw5Sa8bN{Cbt)-u%LckMu5CRFKjtDie7tY;nx)+h&owgcZc+g=f`&6mAJg>zE+aeC#}ZhkK-zU$_!s3uN}c zkYrz5cc%Bd)aTl>78`UYZS6mN5yF!Iw`H$!`3EtSXF9S_2Xd9H`PIJifd^RO! zDEVofN7F5gDV@L~0+pW3q~78qYC7w>3Z9~ja0x*1Nk@!Tf6p5q0!J|8GqNmT=H~a$ z`<`K`xicY8RSvO|*Mlp!9()SLbAw(n3O+3qPaz#LWDHpaj{#+JXjg_tM4-m(Qm@#L zLD8RPz!VC(#`O@1i>EezLJ-D-l=|l;DPWG!A~G(CS{8T#a-CckqddZm^@=h?)FnPW zGQm}$#LP`z1h%QK*H1K%EyX;zCS=Pm=-x}|Y9Ehluw$)>s3p~5lU&4`{ch_|O$t;; zdnGG&3Ma+SJJ?~UIeSWRughyO=fiVCWT97d`rIX`yt4MUN^wgDrXV;EenTI5waOb= z7pxz?M78FEwA$F$!h(e6U(h_kiplz^BC<7iViK{8@jWhhw0K{IRd&K184bmUacQsi z#WjiFzS?O$)`uB{Lt@~9dFYpdS$Sm^r^`NxIX7R5JNnTscNVABSeW8`b{Lb(F2(sD zId>MFvd(#nKlCFV>T%2Im_f{CyWDFzCwPOq^)+u~KX3!O~idwBG z5mZ{4`@J`EU`T;6%s;Kni6}Xa;71TOnoLfj=crDMA`7i0OHzC;0>Hm}%+?mnGu;{= zd=E%zsX)H%ZS|7f9S@gfz})ZgV1z#@%D}okNgGvvQ%Di|EIo0nj>0mYdv849xiAB9 z=kehq%7h0Uv2q4bV`p}AEs_R$GrktiqO8*?ZZr;J1^^0 zq2u5X=^CjLoz(91blvt#{AY7#o_DExy1Hsv`BW@z7nQnKAOjMM=u&6wdQN+-{}tH* zl`yiBVpN+ST?|-kLEV8etZ1};QHysY;uUM_zG3$8(VH6GBZ^iJMP`ZuYSvt6+UoQL zJw4v0ql+nM&6$+7ujW~-IGv8d3qW>LM5^$`eNM6tP7wO05IUHTtu_(kIc{HZrqC73 zJq>-J6VR_Xs0S9-2?>Nsqk5f-26Iy!DuojL&5WHoUy_COZTJ!A_6~%FOej}DRn7k$nfmG^^fFadzNz>@3pndtFyD3@)ZMX*cMI*O?z@Uk<3q;w22v+kf>TC!ymqBFnVsd6S+}kJ zt?a(qk7u4Qv!-gQDO}x{lE88P9i1V(EFNDf`Q${}V{JcdQ({2ZwhzAEWt6Y8`H>R2 zA5kHZk`(7DYkKP{ltAa(J@HIoTR(xrgP8b_r+A%hLi8{BGR z=q{KlmcuC7b=*;m2S}brS)R+bklhBUg)>dn34uN)jjQ2j7P=R)Hu$TbBR0kB-4tq5 zf1GEtuD7s#W^ACKv#NkQfALb?c|zPBVp&yWrlWbdVO*;;TEVR3(Ys{w=NvnAaMk3| z?>;6MotrNoiOX5iaubUP`jo#1pjM5{2bWN5B%9anSof0iUFJZqMQ`4P`@p%T6I?er 
z)h;?L+`f1-*qtV7wDwlp1sZ_S3i{~BiiU3A$L?bR1jwZtyCFVlU3 zxe1kN#g1IjzA(TTLun-n zRIi9hiPNk0ncAK6z(u%i>;AhnRd0AShQ5*D2^~cE-+^wj5y;JdSsRZiL7G~6n`KNA z`KGjv@c9#M`y@9OHyLULi+vNfQ5Ql;Ke^&2C9-KVvC2;`v#F@vRz1N+du40B@jO{2 zrRl{Ead}fqy$+wS;i#h-J=Uj@H&h@_gHMk`vawf@+Y*P4U6j28sy|vW7D=0D3~R{)A^*Y=7Dm3V%Gob3-H* za~eLGBx+j(nnm2iK^E^A8kw?c5_XAAh^M-F=LAE;p8qPCv=j^f6Uje$tWrIgi`qG zGEB3{h3FbS!yJ#o$<5+4`Y;?>dr}|fem?Y* zx1P(3vvt%xo|j^|eYU(n)Zw5G(CWOi61`qT{Sfk5eJA3QRC;&5t7Pj`40%Xev64S4 zlh1E0+Bl>8Ci(S}GW4EwvKCh(slE4u#@rB6Q&{%x2g*+h`0VX*Jzy~f{<3$t1eQP? z+V+Q6I;*>YMm36IPRzgNJ}jp;`8`}NJ!(vh%~R8%ir&4}l(!z)hc)&YU^?=}-Bxe$ zQ71PA3O;d#QCM`$+$Vg`py6Iw2lB-q0!Gyjs-s%U9;n%1&VVyE6ae$j& zfA`#6G2_M;ua^bO%HDgRiqe(*b9r=t5zeU(hZQ3#*vm2k-_P0{6@$q**!m z_2446(;eM{h`P_qI}$a{9lB%`&qm5zSCxrrQs~uC(%^jhSkY-je#=*t^=9fw!*@&Wj~yCZW&J(L1c{`x<8RllJ(?9G zHiX!0jWDZC&3mK52QKk@uR1ZG<@Va;gv0}5=p%~+{2mJV^5@b{C?9Niq8dat< zFD$VSKqj@yb(8+%9Lkh3(@G}G>Pq?~h<^fFHKY3P|Izo*!v3e<;Uczc4vFxE0~R24 z0Ruo12b2!!X9Ht^ZA6nKZh{^FG0wKa zI{T-?w^GO~Fnm;l0pwt1*_z0a1dPP6<%2U^?Kzpv1OUJrFl^!c1^^1j(+i&V04#?< zf6Pdsi-Ns$#Zm%+PHx>>K*s`b5y(5swaNa_V^Ug{5M*Qkpg;Zw+T_?S>%Qp~7IO`_ z1Uy$qZHWj2;gUR$7f@aMrtqFWsF z0I5Jxuk|xfV`X_xgd_9bef9b3uXV9v`{|E0GGdz~EOkM5RNSWO)ATIgS3N~FrTPoa zXDp{m`}GS)hr0$|M;8{ln3aa*dKf(GognZk&I&;&sf0Ne^n6VjbHVEQX)9r`a16Jc zT;+fAGC8RRvG$v!z1SMXftX~%2m1Ua#J-y6>Yf!7(jUZ-(zDM|uCL0XdZli!G)J9! zG1%~I&Qro8&+J(SQo~d$epDd+!eFUwz5TMnQT1Ii0 zyv)&A9r(2h9Ji%T2-8rr7cbAf7++(5_~0f(l>0~D_6$X_zF0B$8Z#`;pOq1KcPH{) z=BNlw#5~V-qIj4}U6E}V9c=XAJ!Zwp#1{9juN=CC2gM(3lKPfHSJ(Dn<2jpYi`7=X zJ0A%~MffceW~bSLX77rkHB9teuwa2P48E<2GdQEC>O|o+kEVQ)0ID|mU_Jby0u^f_ z+{|pO218hYbGo^>ShVaEw_&J9lw03e?@YqO8-^}#roG9aYusu$t4A3CEou0IHqb@u z0t@XNOO`t;l?DvhhiofmWzqn>F2jRE06(u}wnwi9pq0RqcLpk8=v+q#Dcpt(TiPMZ z8S!P7u26M?Cue~HLhI(ivX5LvK$9(|ffR1pa`v*lB-X51n(Ochz}13H%+*AM^O~p! 
z+DlImlz=2>HES*zJA#F?h+5fB0CbH6EWop<$-tRBOc*L=9!gr_p#+kR?C@=JpqT*L zWSBkxBc?K_YzCTLRepLPTN$0Ip#frE}~<87%TkD7;?}RB4J40C0lQ?KL%| z3;+&YP~u9{^JnE_tcwA=nB9@+?FpOp&)(HQ=p(PidWL8)7$Ah>Ra}3G(4yQCwPLP% zn9zI)yU6T*EGDHAp%619a=26=wth>$His}ru56D2c-;miz~!2T7f;j1+S%Lg zMh6mxxzW+10MB82Oh<TZKFG zSb0Ke4rNl$5Y(zydZR`lZyx1+;=qMhpTn#2>d6312bCaKZ#A=dVJ$K4ey>$TSzAEc z##_%raEC;uUw9&SZ2VMjsEERVvT%h*i*^fV&g;~%3o>%m%|D-ai&C7PQrGtBl25fe$SuYVFR7;C=eui(JOPj|6(4v856Vnffy;H1A z-%A4?T`q!Mc2UAGT}!6K!A2+E?DS)SP%>Sx;Nlvq0LOHZ?Ph%k9G-fV;4{kBcA$b_jE z3xGg)v=B-{V-J2Ei{lC>TbxLY1ZjY?SR>trdE)w;on34{*KGW-%h%@%<#x^6&aAOE zw0wW@(#6+ee5zd@;U1DzVnoh2!;NQV0EMgoJjlG8E1wx~2wYA$nKcJkpkM&Nd>Wil zh!Z>*Cj5pWcZuCskcb^20FW|oK_0dnNM%4~0l;F=1}>`TvGMGp`eINOOyJ%obJ#8l zKneC8R0Y_DKz=I;aPR=qW0Fe-?4ufFy?%7g}(<0<%d!F?8m79(mMBJIpo1=I6Uje2p?JD=z; z13=?n>9-#8TFWm00@p*6y7;N~+Z0g_ExdT@sxx;wL|1l@+o(qy_JlG_ApgoF7Wt^x!qC|RzG0w-tMJYz1G19BDWi}ybxF{&4h3?UxX8@0}ki33cwAO;_9Z1(9$zW7i%ja~? zw(MdKfg=N^qx1j;wuuaeoTmwlYKS>*!!OQlg{Wb|d;bOKMwI?D&^bZSrThLP&;bIy zk-Y`LwI7UN=w!NOJq5D2uQO!PTAdpO98nzra{r!jl8|w*F@MK6mtrux6ir1q>AuIA zyd;1o)Lq}cEL*PE_ABBt>ol0UMfoQ&SwrFGGUy$dKb`-^yHOQJsq!%e^K<2{Vd7B@ zsd;fpd_q*AwHkh*ekTrSPfSVGT_hUR$LX`9sCjR$cwCfBo^@=SKR7DVeZwQGnty+X zn}@rti>|mu+n-1~i{8ZbtE+7O;3jNkx>1zDClutUr)<$dR`**c(H%K#^KwPmBL9350& zz0C8?)06t`kuj#{4E;mCZ7#C9yy4);k__pMaxq`dZn%H7ohjihNiz5@aO&q0@}j{u zl$mI0j%uUW|0t6UxSX_#rO7y^N}PL1peh99&5h|vO>O>~IkxACxtUahYzT6IeG??T zz7^NS29a%?K-g@_ zq!Pd=J%(M(n0!V93>qaLnp~#ep~2Ksm_tj zW}2U3)hl*}2X^mQx3tQ4i)>2yhW?`YV?G2C?Czz75Kvr;u23ENQ9b<@nb$q**fetW z_H+~Xi(_w+ct^WcW64(#vnKD&Q85wSU5{j3Cw(79ou2TC=B%cT?Y>PmjK*>n=!D-k z*IE*R9cz$PRD>sf=;zF{H#ZOlz~n$0AQ7NF1>wh_aqD}b>bOI!nG640Sc51_hy52r z1`BX^M)M~8J3@dbBzTb91(op+TaE2N;N>l@l=hHnybAuH8V59ELGGf}$C!jM8vr#M zCb(b%t~_8D19lM^h!X%zcqqB0n0Ev~CJ{FFpRSw`TU-7c*s4(b$Jhe8ro$%+0Et8= zST;@*$kX0)xwMEA+&W%YAvJl}g4O<QLeBp1nKvi%LwiW+FxGp(0vX1Z^iR!XJxl z^OR9T_FL&1>859SlAYL9!tw7P z-fGPIY5ntvudQJ0PtSQ&sKmg8z3ZyF*xjtIky$z;d_&0Qk4-X4=6^#{t>qvH;&yUWw4mJg 
z*M=!FYULhQ4eoc6r_Kl#`hh~exQB8=Hv)+2U@GUM1NBTR2dl5`4L;y6t_uHsLm7}*EYTQ=?e`X95)$d0@>d^GYU-sj zZ}zEKPB%-HGK&dx0&f&xy4+i1FF@RES*MB>>)>B`ti39x>Tt(&0hhDr55p21m0rR$ zY7J4`rmpAyBr>z^#VHDTA=&CiQlqabwgv6#iA>q^aB^}^IQmu?pb+LcgumEcWaxO% z2H;y2aA^1ODCO`UPzqXLfJp&d0Ooph92&91ILF$;J2xEMqvRtsNxGk*+V>nOx+ycVr``h(Rf%o* z@@bReG4ZCD;55m=lHhQ@?6;ewdn)tC z_9^w3PN&jFTHh#f4sU#Y$;mNHTn|k1b6wY~*JYLkT#8P~VB;4c-QzJsnXjn1iMrJT zXJ(sJ_9u?K%RB6|e$?-<@2NYAz-iy*PBMwu;DoiEO-y|?If|e*$Vf12jj%FL_f_*L zr(F}2Z+B0^zJ>3I#VDEBjXgQ^gCx13Vqp+Q@&I@>Zdmp)rnlt&DvKeM;mNk#1^#5Ma7U=|~v~r*&HP^t_<~l=9aX%1QT+>J}9K|kX=Pxc{c-p@a z-ZIntZ1k__1e#-<0he8rB>j|<%axgfF2gVu;1@N;8QNNTt^Wr;Ex@=+D&sSJC~jg_ z5f}zv81U;6&|9hp=fyB*9!sNK9Dq}eG+zQ&IZKEr*r5L%QD8=xicD~m-L_kMeaNCN z`8~EdHXBPDtI=9|PC05f^9%V8kB{5vcfnO?l>dEH2{%P=_QVw`BFt_6!tPY0eTIwK z#yhJKij`~cBKAKSzQ+l?6=1^naUi;BX^>y0VRstRFd|?1Ky8?{mD=hDS2mH`?jBe@ z<(hC+`NLZZv9%@IPn*KV^ZY89zWA&qNZIivnjzYs4ql8AM$OjceZEpV(t%QuA_m>8 zH8pP5bVYe~e^t72=U`~)9xZR%qlJe(Rb$(3xPBDebAuF}%24j#+I0%i5q3Ry!8s#w zcC@8$=z>C{hXB}z0CYG3pukE7;MNXGl40xt z4-fTnVTKyO!@>aiqi_!K0{n}-g7P7+coG(%M#up6Z|9J}^9A8-g9*w6QctO~-FP=1(Q?id`I=HzC88x{hlZwH zOfDg;GM2JPv!dcTCmoI{*o)ar%sWYMcbD3SkDh$dGg)pa87$^1+IOqQuKKSx>T;X`o&XU0JPeo`fAfsupLxg5SmFP50#5xmogWcX24wNMjzUL|pP}MfT+a z@=wK)7LLG00=Jfvq;3e1S@LOur$S6VkO>Qn-*y=MHtjd-7VWTOzwEGogLUAak`BuF zpCjED^Zy&t6^{Ool1>7W4mRi?kgndoMVu#R+6~s>xhDKIuSqGQYtitYR=7Js(aX}iNF_^jWsjWhi{}DfMI@z?PF36 zXL`K1lKzP8FRxww0v+TQSH45zjM3#@jgtpcnL$@G1JFMRXiB#jOE*ZY&eG~Z*504* z7JA^cCM2A3Dz0WnZ6Pzsi@*Tjg|&v>l#Y7g%NQ3W?+22HFx5|+I`V!oL^Sxu4-$WoY2H`z}AWZO=Q4j>{9Vn@_{Q1*L7#{%%o!j+L}x^P|5S zmC2^B2SUV^9gdY}Rx28-e;N6qR{p?ePpaNrdlPC`GlEzq9d#wkO&tD*rft%P4p7I? z?cMcr3S$Pp-aIRKzQ-M@xaAjJdpbH7Z3iB2J8L%^mq0AQSy&sR`38tgw3JC0|Wwd(D!&4+^Zf8 z3Ol}-v)Iz#au;RsSqJ+Dc)@GvVBf$0<%QqR{_Do|{=PxSc!$CMLB~$QUrz7}4Gog! 
zWgwAVO4jh?AbJnX4naE|gc8yAUPpX`4#GQ7l;D6Pez2VU$>7Br;CtId_&W+j-o563 zNcDwMy@CS5eoI9{sY8mtq*@0Bc!ivv4cF}yu^DTah{N0$0)zB z&FkiUmC0?0IuaiDTi)XxZ_>Wg9Y$(h^!R~{dXAci~8!sUR>{qWb;c4hU zONs!%ODmvMTcnefIzS@i_2B-xqP);O#A`)<QTLRpa;dTLTyWj@pBkAyd$FSo* zAv{4a@w%`afl!C9i)nenITMRO=y@R!%MQaKoj@Q~{X`%P;q^7m2*kS0(0ztw%we8| K0|H@L{l5Sv%EZ9{ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png deleted file mode 100644 index da6ec772092e788b9db8dd7bf98b9d713255bd72..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmeAS@N?(olHy`uVBq!ia0vp^AT|dF8<0HkD{mW+vhs9s45^rt{Nwxo|NpDSef}SR z)AjRwLB+TFMqi%)*FV4eU*G8XwcaNAPd!`P&HpncCH@&33UInDb7J`WPs~|dc=2+|Ns5_85n>V41r7_ z2Z$G#W^V*)67zI%45?szdvPP9g98K0!D87L`y+K3D$*XGO*?h$S#W)QaTODT0|NsG z0|O(20s{jJLjwbY0MII^pec-jRhCRa3|q)F2jR8_Zn(Wbw&7?&LAE(a!FvT)I8$Oc za(q!@4wBoXsALXGT7oAksu>=jjG1By8QyR)iowr~<@`_ format, for example:: - - /tmp/asfjsfjoj3/%04d.jpg [1-3] - - ''' - items = [] - for index in range(3): - item_path = os.path.join( - temporary_directory, '{0:04d}.jpg'.format(index) - ) - with open(item_path, 'w') as file_descriptor: - file_descriptor.write(uuid.uuid4().hex) - file_descriptor.close() - - items.append(item_path) - - collections, _ = clique.assemble(items) - sequence_path = collections[0].format() - - return sequence_path - - -@pytest.fixture() -def video_path(): - '''Return a path to a video file.''' - video = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - - return video - - -@pytest.fixture() -def session(): - '''Return session instance.''' - return ftrack_api.Session() - - -@pytest.fixture() -def session_no_autoconnect_hub(): - '''Return session instance not auto connected to hub.''' - return 
ftrack_api.Session(auto_connect_event_hub=False) - - -@pytest.fixture() -def unique_name(): - '''Return a unique name.''' - return 'test-{0}'.format(uuid.uuid4()) - - -@pytest.fixture() -def temporary_path(request): - '''Return temporary path.''' - path = tempfile.mkdtemp() - - def cleanup(): - '''Remove created path.''' - try: - shutil.rmtree(path) - except OSError: - pass - - request.addfinalizer(cleanup) - - return path - - -@pytest.fixture() -def new_user(request, session, unique_name): - '''Return a newly created unique user.''' - entity = session.create('User', {'username': unique_name}) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(entity) - session.commit() - - request.addfinalizer(cleanup) - - return entity - - -@pytest.fixture() -def user(session): - '''Return the same user entity for entire session.''' - # Jenkins user - entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def project_schema(session): - '''Return project schema.''' - # VFX Scheme - entity = session.get( - 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' - ) - assert entity is not None - return entity - - -@pytest.fixture() -def new_project_tree(request, session, user): - '''Return new project with basic tree.''' - project_schema = session.query('ProjectSchema').first() - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - for sequence_number in range(1): - sequence = session.create('Sequence', { - 'name': 'sequence_{0:03d}'.format(sequence_number), - 'parent': project - }) - - for 
shot_number in range(1): - shot = session.create('Shot', { - 'name': 'shot_{0:03d}'.format(shot_number * 10), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1): - task = session.create('Task', { - 'name': 'task_{0:03d}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - session.create('Appointment', { - 'type': 'assignment', - 'context': task, - 'resource': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def new_project(request, session, user): - '''Return new empty project.''' - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def project(session): - '''Return same project for entire session.''' - # Test project. 
- entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_task(request, session, unique_name): - '''Return a new task.''' - project = session.query( - 'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb' - ).one() - project_schema = project['project_schema'] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - task = session.create('Task', { - 'name': unique_name, - 'parent': project, - 'status': default_task_status, - 'type': default_task_type - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(task) - session.commit() - - request.addfinalizer(cleanup) - - return task - - -@pytest.fixture() -def task(session): - '''Return same task for entire session.''' - # Tests/python_api/tasks/t1 - entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_scope(request, session, unique_name): - '''Return a new scope.''' - scope = session.create('Scope', { - 'name': unique_name - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(scope) - session.commit() - - request.addfinalizer(cleanup) - - return scope - - -@pytest.fixture() -def new_job(request, session, unique_name, user): - '''Return a new scope.''' - job = session.create('Job', { - 'type': 'api_job', - 'user': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(job) - session.commit() - - request.addfinalizer(cleanup) - - return job - - -@pytest.fixture() -def new_note(request, session, unique_name, new_task, user): - '''Return a new note attached to a task.''' - note = new_task.create_note(unique_name, user) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(note) - 
session.commit() - - request.addfinalizer(cleanup) - - return note - - -@pytest.fixture() -def new_asset_version(request, session): - '''Return a new asset version.''' - asset_version = session.create('AssetVersion', { - 'asset_id': 'dd9a7e2e-c5eb-11e1-9885-f23c91df25eb' - }) - session.commit() - - # Do not cleanup the version as that will sometimes result in a deadlock - # database error. - - return asset_version - - -@pytest.fixture() -def new_component(request, session, temporary_file): - '''Return a new component not in any location except origin.''' - component = session.create_component(temporary_file, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_container_component(request, session, temporary_directory): - '''Return a new container component not in any location except origin.''' - component = session.create('ContainerComponent') - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component( - component, temporary_directory, recursive=False - ) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_sequence_component(request, session, temporary_sequence): - '''Return a new sequence component not in any location except origin.''' - component = session.create_component(temporary_sequence, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture -def mocked_schemas(): - '''Return a list of mocked schemas.''' - return [{ - 'id': 'Foo', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'string': { - 'type': 'string' - }, - 'integer': { - 'type': 'integer' - }, - 'number': { - 'type': 'number' - }, - 'boolean': { - 'type': 'boolean' - }, - 'bars': { - 'type': 'array', - 'items': { - 'ref': '$Bar' - } - }, - 'date': { - 'type': 'string', - 'format': 'date-time' - } - }, - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }, { - 'id': 'Bar', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'name': { - 'type': 'string' - }, - 'computed_value': { - 'type': 'string', - } - }, - 'computed': [ - 'computed_value' - ], - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }] - - -@pytest.yield_fixture -def mocked_schema_session(mocker, mocked_schemas): - '''Return a session instance with mocked schemas.''' - with mocker.patch.object( - ftrack_api.Session, - '_load_schemas', - return_value=mocked_schemas - ): - # Mock _configure_locations since it will fail if 
no location schemas - # exist. - with mocker.patch.object( - ftrack_api.Session, - '_configure_locations' - ): - patched_session = ftrack_api.Session() - yield patched_session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py deleted file mode 100644 index bc98f15de24..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py deleted file mode 100644 index 78d61a62d1c..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py +++ /dev/null @@ -1,54 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import json - - -def test_create_component(new_asset_version, temporary_file): - '''Create component on asset version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. 
- session.delete(component) - - -def test_create_component_specifying_different_version( - new_asset_version, temporary_file -): - '''Create component on asset version ignoring specified version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None, - data=dict( - version_id='this-value-should-be-ignored', - version='this-value-should-be-overridden' - ) - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. - session.delete(component) - - -def test_encode_media(new_asset_version, video_path): - '''Encode media based on a file path - - Encoded components should be associated with the version. - ''' - session = new_asset_version.session - job = new_asset_version.encode_media(video_path) - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - - component_id = job_data['output'][0]['component_id'] - component = session.get('FileComponent', component_id) - - # Component should be associated with the version. 
- assert component['version_id'] == new_asset_version['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py deleted file mode 100644 index aff456e2388..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py +++ /dev/null @@ -1,14 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import pytest - - -def test_hash(project, task, user): - '''Entities can be hashed.''' - test_set = set() - test_set.add(project) - test_set.add(task) - test_set.add(user) - - assert test_set == set((project, task, user)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py deleted file mode 100644 index 347c74a50de..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import os - -import pytest - - -def test_get_availability(new_component): - '''Retrieve availability in locations.''' - session = new_component.session - availability = new_component.get_availability() - - # Note: Currently the origin location is also 0.0 as the link is not - # persisted to the server. This may change in future and this test would - # need updating as a result. - assert set(availability.values()) == set([0.0]) - - # Add to a location. - source_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - - target_location = session.query( - 'Location where name is "ftrack.unmanaged"' - ).one() - - target_location.add_component(new_component, source_location) - - # Recalculate availability. 
- - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del new_component['component_locations'] - - availability = new_component.get_availability() - target_availability = availability.pop(target_location['id']) - assert target_availability == 100.0 - - # All other locations should still be 0. - assert set(availability.values()) == set([0.0]) - -@pytest.fixture() -def image_path(): - '''Return a path to an image file.''' - image_path = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - - return image_path - -def test_create_task_thumbnail(task, image_path): - '''Successfully create thumbnail component and set as task thumbnail.''' - component = task.create_thumbnail(image_path) - component.session.commit() - assert component['id'] == task['thumbnail_id'] - - -def test_create_thumbnail_with_data(task, image_path, unique_name): - '''Successfully create thumbnail component with custom data.''' - data = {'name': unique_name} - component = task.create_thumbnail(image_path, data=data) - component.session.commit() - assert component['name'] == unique_name diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py deleted file mode 100644 index 5d5a0baa7ca..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py +++ /dev/null @@ -1,25 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.factory - - -class CustomUser(ftrack_api.entity.base.Entity): - '''Represent custom user.''' - - -def test_extend_standard_factory_with_bases(session): - '''Successfully add extra bases to standard factory.''' - standard_factory = 
ftrack_api.entity.factory.StandardFactory() - - schemas = session._load_schemas(False) - user_schema = [ - schema for schema in schemas if schema['id'] == 'User' - ].pop() - - user_class = standard_factory.create(user_schema, bases=[CustomUser]) - session.types[user_class.entity_type] = user_class - - user = session.query('User').first() - - assert CustomUser in type(user).__mro__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py deleted file mode 100644 index 52ddbda0aca..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py +++ /dev/null @@ -1,42 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - - -def test_create_job(session, user): - '''Create job.''' - job = session.create('Job', { - 'user': user - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_with_valid_type(session, user): - '''Create job explicitly specifying valid type.''' - job = session.create('Job', { - 'user': user, - 'type': 'api_job' - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_using_faulty_type(session, user): - '''Fail to create job with faulty type.''' - with pytest.raises(ValueError): - session.create('Job', { - 'user': user, - 'type': 'not-allowed-type' - }) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py deleted file mode 100644 index 5bb90e451f1..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py +++ /dev/null @@ -1,516 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import base64 -import filecmp - -import pytest -import requests - -import ftrack_api.exception -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.id -import ftrack_api.entity.location -import ftrack_api.resource_identifier_transformer.base as _transformer -import ftrack_api.symbol - - -class Base64ResourceIdentifierTransformer( - _transformer.ResourceIdentifierTransformer -): - '''Resource identifier transformer for test purposes. - - Store resource identifier as base 64 encoded string. - - ''' - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return base64.encodestring(resource_identifier) - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return base64.decodestring(resource_identifier) - - -@pytest.fixture() -def new_location(request, session, unique_name, temporary_directory): - '''Return new managed location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=os.path.join(temporary_directory, 'location') - ) - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. 
- for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. - session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def new_unmanaged_location(request, session, unique_name): - '''Return new unmanaged location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - # TODO: Change to managed and use a temporary directory cleaned up after. - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedTestLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. - for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. 
- session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def origin_location(session): - '''Return origin location.''' - return session.query('Location where name is "ftrack.origin"').one() - -@pytest.fixture() -def server_location(session): - '''Return server location.''' - return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) - - -@pytest.fixture() -def server_image_component(request, session, server_location): - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - component = session.create_component( - image_file, location=server_location - ) - - def cleanup(): - server_location.remove_component(component) - request.addfinalizer(cleanup) - - return component - - -@pytest.mark.parametrize('name', [ - 'named', - None -], ids=[ - 'named', - 'unnamed' -]) -def test_string_representation(session, name): - '''Return string representation.''' - location = session.create('Location', {'id': '1'}) - if name: - location['name'] = name - assert str(location) == '' - else: - assert str(location) == '' - - -def test_add_components(new_location, origin_location, session, temporary_file): - '''Add components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components( - [component_a, component_b], [origin_location, origin_location] - ) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_from_single_location( - new_location, origin_location, session, temporary_file -): - '''Add components from single location.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components([component_a, component_b], origin_location) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_with_mismatching_sources(new_location, new_component): - '''Fail to add components when sources mismatched.''' - with pytest.raises(ValueError): - new_location.add_components([new_component], []) - - -def test_add_components_with_undefined_structure(new_location, mocker): - '''Fail to add components when location structure undefined.''' - mocker.patch.object(new_location, 'structure', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_components([], []) - - -def test_add_components_already_in_location( - session, temporary_file, new_location, new_component, origin_location -): - '''Fail to add components already in location.''' - new_location.add_component(new_component, origin_location) - - another_new_component = session.create_component( - temporary_file, location=None - ) - - with pytest.raises(ftrack_api.exception.ComponentInLocationError): - 
new_location.add_components( - [another_new_component, new_component], origin_location - ) - - -def test_add_component_when_data_already_exists( - new_location, new_component, origin_location -): - '''Fail to add component when data already exists.''' - # Inject pre-existing data on disk. - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - container = new_location.accessor.get_container(resource_identifier) - new_location.accessor.make_container(container) - data = new_location.accessor.open(resource_identifier, 'w') - data.close() - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_source_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when source is missing accessor.''' - mocker.patch.object(origin_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_target_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when target is missing accessor.''' - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_container_component( - new_container_component, new_location, origin_location -): - '''Add container component.''' - new_location.add_component(new_container_component, origin_location) - - assert ( - new_location.get_component_availability(new_container_component) - == 100.0 - ) - - -def test_add_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - assert ( - 
new_location.get_component_availability(new_sequence_component) - == 100.0 - ) - - -def test_add_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_components( - session, new_location, origin_location, temporary_file -): - '''Remove components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - new_location.add_components([component_a, component_b], origin_location) - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - new_location.remove_components([ - component_a, component_b - ]) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - -def test_remove_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - new_location.remove_component( - new_sequence_component, recursive=True - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - new_location.remove_component( - new_sequence_component, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_component_missing_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to remove component when location is missing accessor.''' - new_location.add_component(new_component, origin_location) - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.remove_component(new_component) - - -def test_resource_identifier_transformer( - new_component, new_unmanaged_location, origin_location, mocker -): - '''Transform resource identifier.''' - session = new_unmanaged_location.session - - transformer = Base64ResourceIdentifierTransformer(session) - mocker.patch.object( - new_unmanaged_location, 'resource_identifier_transformer', transformer - ) - - new_unmanaged_location.add_component(new_component, origin_location) - - original_resource_identifier = origin_location.get_resource_identifier( - new_component - ) - assert ( - 
new_component['component_locations'][0]['resource_identifier'] - == base64.encodestring(original_resource_identifier) - ) - - assert ( - new_unmanaged_location.get_resource_identifier(new_component) - == original_resource_identifier - ) - - -def test_get_filesystem_path(new_component, new_location, origin_location): - '''Retrieve filesystem path.''' - new_location.add_component(new_component, origin_location) - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - expected = os.path.normpath( - os.path.join(new_location.accessor.prefix, resource_identifier) - ) - assert new_location.get_filesystem_path(new_component) == expected - - -def test_get_context(new_component, new_location, origin_location): - '''Retrieve context for component.''' - resource_identifier = origin_location.get_resource_identifier( - new_component - ) - context = new_location._get_context(new_component, origin_location) - assert context == { - 'source_resource_identifier': resource_identifier - } - - -def test_get_context_for_component_not_in_source(new_component, new_location): - '''Retrieve context for component not in source location.''' - context = new_location._get_context(new_component, new_location) - assert context == {} - - -def test_data_transfer(session, new_location, origin_location): - '''Transfer a real file and make sure it is identical.''' - video_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - component = session.create_component( - video_file, location=new_location - ) - new_video_file = new_location.get_filesystem_path(component) - - assert filecmp.cmp(video_file, new_video_file) - - -def test_get_thumbnail_url(server_location, server_image_component): - '''Test download a thumbnail image from server location''' - thumbnail_url = server_location.get_thumbnail_url( - server_image_component, - size=10 - ) - assert thumbnail_url - - response = 
requests.get(thumbnail_url) - response.raise_for_status() - - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image-resized-10.png' - ) - ) - expected_image_contents = open(image_file).read() - assert response.content == expected_image_contents diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py deleted file mode 100644 index 3a81fdbe858..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import ftrack_api - - -def test_query_metadata(new_project): - '''Query metadata.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - results = session.query( - 'Project where metadata.key is {0}'.format(metadata_key) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.value is {0}'.format(metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.key is {0} and ' - 'metadata.value is {1}'.format(metadata_key, metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - -def test_set_get_metadata_from_different_sessions(new_project): - '''Get and set metadata using different sessions.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - new_session = ftrack_api.Session() - project = 
new_session.query( - 'Project where id is {0}'.format(new_project['id']) - )[0] - - assert project['metadata'][metadata_key] == metadata_value - - project['metadata'][metadata_key] = uuid.uuid1().hex - - new_session.commit() - - new_session = ftrack_api.Session() - project = new_session.query( - 'Project where id is {0}'.format(project['id']) - )[0] - - assert project['metadata'][metadata_key] != metadata_value - - -def test_get_set_multiple_metadata(new_project): - '''Get and set multiple metadata.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_session = ftrack_api.Session() - retrieved = new_session.query( - 'Project where id is {0}'.format(new_project['id']) - )[0] - - assert set(retrieved['metadata'].keys()) == set(['key1', 'key2']) - - -def test_metadata_parent_type_remains_in_schema_id_format(session, new_project): - '''Metadata parent_type remains in schema id format post commit.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - assert entity['parent_type'] == new_project.entity_type - - -def test_set_metadata_twice(new_project): - '''Set metadata twice in a row.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_project['metadata'] = { - 'key3': 'value3', - 'key4': 'value4' - } - session.commit() - - -def test_set_same_metadata_on_retrieved_entity(new_project): - '''Set same metadata on retrieved entity.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1' - } - session.commit() - - project = session.get('Project', new_project['id']) - - project['metadata'] = { - 'key1': 'value1' - 
} - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py deleted file mode 100644 index 5d854eaed4e..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py +++ /dev/null @@ -1,67 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api -import ftrack_api.inspection - - -def test_create_reply(session, new_note, user, unique_name): - '''Create reply to a note.''' - reply_text = 'My reply on note' - new_note.create_reply(reply_text, user) - - session.commit() - - assert len(new_note['replies']) == 1 - - assert reply_text == new_note['replies'][0]['content'] - - -def test_create_note_on_entity(session, new_task, user, unique_name): - '''Create note attached to an entity.''' - note = new_task.create_note(unique_name, user) - session.commit() - - session.reset() - retrieved_task = session.get(*ftrack_api.inspection.identity(new_task)) - assert len(retrieved_task['notes']) == 1 - assert ( - ftrack_api.inspection.identity(retrieved_task['notes'][0]) - == ftrack_api.inspection.identity(note) - ) - - -def test_create_note_on_entity_specifying_recipients( - session, new_task, user, unique_name, new_user -): - '''Create note with specified recipients attached to an entity.''' - recipient = new_user - note = new_task.create_note(unique_name, user, recipients=[recipient]) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - - # Note: The calling user is automatically added server side so there will be - # 2 recipients. 
- assert len(retrieved_note['recipients']) == 2 - specified_recipient_present = False - for entry in retrieved_note['recipients']: - if entry['resource_id'] == recipient['id']: - specified_recipient_present = True - break - - assert specified_recipient_present - - -def test_create_note_on_entity_specifying_category( - session, new_task, user, unique_name -): - '''Create note with specified category attached to an entity.''' - category = session.query('NoteCategory').first() - note = new_task.create_note(unique_name, user, category=category) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - assert retrieved_note['category']['id'] == category['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py deleted file mode 100644 index 10ef485aed4..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py +++ /dev/null @@ -1,64 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Not started', 'In progress', 'Awaiting approval', 'Approved' - ]), - ('Shot', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('AssetVersion', [ - 'Approved', 'Pending' - ]), - ('AssetBuild', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('Invalid', ValueError) -], ids=[ - 'task', - 'shot', - 'asset version', - 'asset build', - 'invalid' -]) -def test_get_statuses(project_schema, schema, expected): - '''Retrieve statuses for schema and optional type.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_statuses(schema) - - else: - statuses = project_schema.get_statuses(schema) - status_names = 
[status['name'] for status in statuses] - assert sorted(status_names) == sorted(expected) - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', - 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', - 'test 1', 'test type 2' - ]), - ('AssetBuild', ['Character', 'Prop', 'Environment', 'Matte Painting']), - ('Invalid', ValueError) -], ids=[ - 'task', - 'asset build', - 'invalid' -]) -def test_get_types(project_schema, schema, expected): - '''Retrieve types for schema.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_types(schema) - - else: - types = project_schema.get_types(schema) - type_names = [type_['name'] for type_ in types] - assert sorted(type_names) == sorted(expected) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py deleted file mode 100644 index 1a5afe70c96..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - - -def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): - '''Add, remove and query scopes for task.''' - query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) - tasks = session.query(query_string) - - assert len(tasks) == 0 - - new_task['scopes'].append(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 1 and tasks[0] == new_task - - new_task['scopes'].remove(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py deleted file mode 100644 index 4d7e4550421..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - - -def test_force_start_timer(new_user, task): - '''Successfully force starting a timer when another timer is running.''' - first_timer = new_user.start_timer(context=task) - second_timer = new_user.start_timer(context=task, force=True) - - assert first_timer['id'] - assert second_timer['id'] - assert first_timer['id'] != second_timer['id'] - - -def test_timer_creates_timelog(new_user, task, unique_name): - '''Successfully create time log when stopping timer. - - A timer which was immediately stopped should have a duration less than - a minute. - - ''' - comment = 'comment' + unique_name - timer = new_user.start_timer( - context=task, - name=unique_name, - comment=comment - ) - timer_start = timer['start'] - timelog = new_user.stop_timer() - - assert timelog['user_id'] == new_user['id'] - assert timelog['context_id']== task['id'] - assert timelog['name'] == unique_name - assert timelog['comment'] == comment - assert timelog['start'] == timer_start - assert isinstance(timelog['duration'], (int, long, float)) - assert timelog['duration'] < 60 - - -def test_reset_user_api_key(new_user): - '''Test resetting of api keys.''' - - api_keys = list() - for i in range(0, 10): - api_keys.append(new_user.reset_api_key()) - - # make sure all api keys are unique - assert len(set(api_keys)) == 10 - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py deleted file mode 100644 index bc98f15de24..00000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py deleted file mode 100644 index 09b270a0438..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py +++ /dev/null @@ -1,92 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import time -import logging -import argparse - -import ftrack_api -from ftrack_api.event.base import Event - - -TOPIC = 'test_event_hub_server_heartbeat' -RECEIVED = [] - - -def callback(event): - '''Track received messages.''' - counter = event['data']['counter'] - RECEIVED.append(counter) - print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) - - -def main(arguments=None): - '''Publish and receive heartbeat test.''' - parser = argparse.ArgumentParser() - parser.add_argument('mode', choices=['publish', 'subscribe']) - - namespace = parser.parse_args(arguments) - logging.basicConfig(level=logging.INFO) - - session = ftrack_api.Session() - - message_count = 100 - sleep_time_per_message = 1 - - if namespace.mode == 'publish': - max_atempts = 100 - retry_interval = 0.1 - atempt = 0 - while not session.event_hub.connected: - print ( - 'Session is not yet connected to event hub, sleeping for 0.1s' - ) - time.sleep(retry_interval) - - atempt = atempt + 1 - if atempt > max_atempts: - raise Exception( - 'Unable to connect to server within {0} seconds'.format( - max_atempts * retry_interval - ) - ) - - print('Sending {0} messages...'.format(message_count)) - - for counter in range(1, message_count + 1): - session.event_hub.publish( - 
Event(topic=TOPIC, data=dict(counter=counter)) - ) - print('Sent message {0}'.format(counter)) - - if counter < message_count: - time.sleep(sleep_time_per_message) - - elif namespace.mode == 'subscribe': - session.event_hub.subscribe('topic={0}'.format(TOPIC), callback) - session.event_hub.wait( - duration=( - ((message_count - 1) * sleep_time_per_message) + 15 - ) - ) - - if len(RECEIVED) != message_count: - print( - '>> Failed to receive all messages. Dropped {0} <<' - .format(message_count - len(RECEIVED)) - ) - return False - - # Give time to flush all buffers. - time.sleep(5) - - return True - - -if __name__ == '__main__': - result = main(sys.argv[1:]) - if not result: - raise SystemExit(1) - else: - raise SystemExit(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py deleted file mode 100644 index d9496fe0703..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.event.base - - -def test_string_representation(): - '''String representation.''' - event = ftrack_api.event.base.Event('test', id='some-id') - assert str(event) == ( - "" - ) - - -def test_stop(): - '''Set stopped flag on event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - -def test_is_stopped(): - '''Report stopped status of event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - event.stop() - assert event.is_stopped() is True diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py deleted file mode 100644 index 4cf68b58f0d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py +++ /dev/null @@ -1,174 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import operator -import inspect - -import pytest - -from ftrack_api.event.expression import ( - Expression, All, Any, Not, Condition, Parser -) -from ftrack_api.exception import ParseError - - -@pytest.fixture() -def candidate(): - '''Return common candidate to test expressions against.''' - return { - 'id': 10, - 'name': 'value', - 'change': { - 'name': 'value', - 'new_value': 10 - } - } - - -@pytest.mark.parametrize('expression, expected', [ - pytest.mark.xfail(('', Expression())), - ('invalid', ParseError), - ('key=value nor other=value', ParseError), - ('key=value', Condition('key', operator.eq, 'value')), - ('key="value"', Condition('key', operator.eq, 'value')), - ( - 'a=b and ((c=d or e!=f) and not g.h > 10)', - All([ - Condition('a', operator.eq, 'b'), - All([ - Any([ - Condition('c', operator.eq, 'd'), - Condition('e', operator.ne, 'f') - ]), - Not( - Condition('g.h', operator.gt, 10) - ) - ]) - ]) - ) -], ids=[ - 'empty expression', - 'invalid expression', - 'invalid conjunction', - 'basic condition', - 'basic quoted condition', - 'complex condition' -]) -def test_parser_parse(expression, expected): - '''Parse expression into Expression instances.''' - parser = Parser() - - if inspect.isclass(expected)and issubclass(expected, Exception): - with pytest.raises(expected): - parser.parse(expression) - else: - assert str(parser.parse(expression)) == str(expected) - - -@pytest.mark.parametrize('expression, expected', [ - (Expression(), ''), - (All([Expression(), Expression()]), ' ]>'), 
- (Any([Expression(), Expression()]), ' ]>'), - (Not(Expression()), '>'), - (Condition('key', '=', 'value'), '') -], ids=[ - 'Expression', - 'All', - 'Any', - 'Not', - 'Condition' -]) -def test_string_representation(expression, expected): - '''String representation of expression.''' - assert str(expression) == expected - - -@pytest.mark.parametrize('expression, expected', [ - # Expression - (Expression(), True), - - # All - (All(), True), - (All([Expression(), Expression()]), True), - (All([Expression(), Condition('test', operator.eq, 'value')]), False), - - # Any - (Any(), False), - (Any([Expression(), Condition('test', operator.eq, 'value')]), True), - (Any([ - Condition('test', operator.eq, 'value'), - Condition('other', operator.eq, 'value') - ]), False), - - # Not - (Not(Expression()), False), - (Not(Not(Expression())), True) -], ids=[ - 'Expression-always matches', - - 'All-no expressions always matches', - 'All-all match', - 'All-not all match', - - 'Any-no expressions never matches', - 'Any-some match', - 'Any-none match', - - 'Not-invert positive match', - 'Not-double negative is positive match' -]) -def test_match(expression, candidate, expected): - '''Determine if candidate matches expression.''' - assert expression.match(candidate) is expected - - -def parametrize_test_condition_match(metafunc): - '''Parametrize condition_match tests.''' - identifiers = [] - data = [] - - matrix = { - # Operator, match, no match - operator.eq: { - 'match': 10, 'no-match': 20, - 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' - }, - operator.ne: {'match': 20, 'no-match': 10}, - operator.ge: {'match': 10, 'no-match': 20}, - operator.le: {'match': 10, 'no-match': 0}, - operator.gt: {'match': 0, 'no-match': 10}, - operator.lt: {'match': 20, 'no-match': 10} - } - - for operator_function, values in matrix.items(): - for value_label, value in values.items(): - if value_label.startswith('wildcard'): - key_options = { - 'plain': 'name', - 'nested': 'change.name' - } - 
else: - key_options = { - 'plain': 'id', - 'nested': 'change.new_value' - } - - for key_label, key in key_options.items(): - identifiers.append('{} operator {} key {}'.format( - operator_function.__name__, key_label, value_label - )) - - data.append(( - key, operator_function, value, - 'no-match' not in value_label - )) - - metafunc.parametrize( - 'key, operator, value, expected', data, ids=identifiers - ) - - -def test_condition_match(key, operator, value, candidate, expected): - '''Determine if candidate matches condition expression.''' - condition = Condition(key, operator, value) - assert condition.match(candidate) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py deleted file mode 100644 index 6f1920dddf4..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py +++ /dev/null @@ -1,701 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect -import json -import os -import time -import subprocess -import sys - -import pytest - -import ftrack_api.event.hub -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event -import ftrack_api.exception - - -class MockClass(object): - '''Mock class for testing.''' - - def method(self): - '''Mock method for testing.''' - - -def mockFunction(): - '''Mock function for testing.''' - - -class MockConnection(object): - '''Mock connection for testing.''' - - @property - def connected(self): - '''Return whether connected.''' - return True - - def close(self): - '''Close mock connection.''' - pass - - -def assert_callbacks(hub, callbacks): - '''Assert hub has exactly *callbacks* subscribed.''' - # Subscribers always starts with internal handle_reply subscriber. 
- subscribers = hub._subscribers[:] - subscribers.pop(0) - - if len(subscribers) != len(callbacks): - raise AssertionError( - 'Number of subscribers ({0}) != number of callbacks ({1})' - .format(len(subscribers), len(callbacks)) - ) - - for index, subscriber in enumerate(subscribers): - if subscriber.callback != callbacks[index]: - raise AssertionError( - 'Callback at {0} != subscriber callback at same index.' - .format(index) - ) - - -@pytest.fixture() -def event_hub(request, session): - '''Return event hub to test against. - - Hub is automatically connected at start of test and disconnected at end. - - ''' - hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - hub.connect() - - def cleanup(): - '''Cleanup.''' - if hub.connected: - hub.disconnect() - - request.addfinalizer(cleanup) - - return hub - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_server_url(server_url, expected): - '''Return server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_network_location(server_url, expected): - '''Return network location of server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_network_location() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', True), - ('http://test.ftrackapp.com', False) -], ids=[ - 'secure', - 'not secure' -]) -def test_secure_property(server_url, expected, mocker): - '''Return 
whether secure connection used.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.secure is expected - - -def test_connected_property(session): - '''Return connected state.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.connect() - assert event_hub.connected is True - - event_hub.disconnect() - assert event_hub.connected is False - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000'), - ('test.ftrackapp.com', ValueError), - ('https://:9000', ValueError), -], ids=[ - 'with port', - 'without port', - 'missing scheme', - 'missing hostname' -]) -def test_initialise_against_server_url(server_url, expected): - '''Initialise against server url.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - else: - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -def test_connect(session): - '''Connect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - - assert event_hub.connected is True - event_hub.disconnect() - - -def test_connect_when_already_connected(event_hub): - '''Fail to connect when already connected''' - assert event_hub.connected is True - - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.connect() - - assert 'Already connected' in str(error) - - -def test_connect_failure(session, mocker): - '''Fail to connect to server.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - def force_fail(*args, **kwargs): 
- '''Force connection failure.''' - raise Exception('Forced fail.') - - mocker.patch('websocket.create_connection', force_fail) - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - -def test_connect_missing_required_transport(session, mocker, caplog): - '''Fail to connect to server that does not provide correct transport.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - original_get_socket_io_session = event_hub._get_socket_io_session - - def _get_socket_io_session(): - '''Patched to return no transports.''' - session = original_get_socket_io_session() - return ftrack_api.event.hub.SocketIoSession( - session[0], session[1], [] - ) - - mocker.patch.object( - event_hub, '_get_socket_io_session', _get_socket_io_session - ) - - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - logs = caplog.records() - assert ( - 'Server does not support websocket sessions.' 
in str(logs[-1].exc_info) - ) - - -def test_disconnect(event_hub): - '''Disconnect and unsubscribe all subscribers.''' - event_hub.disconnect() - assert len(event_hub._subscribers) == 0 - assert event_hub.connected is False - - -def test_disconnect_without_unsubscribing(event_hub): - '''Disconnect without unsubscribing all subscribers.''' - event_hub.disconnect(unsubscribe=False) - assert len(event_hub._subscribers) > 0 - assert event_hub.connected is False - - -def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): - '''Close connection from manually connected hub.''' - session_no_autoconnect_hub.event_hub.connect() - session_no_autoconnect_hub.close() - assert session_no_autoconnect_hub.event_hub.connected is False - - -def test_disconnect_when_not_connected(session): - '''Fail to disconnect when not connected''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.disconnect() - - assert 'Not currently connected' in str(error) - - -def test_reconnect(event_hub): - '''Reconnect successfully.''' - assert event_hub.connected is True - event_hub.reconnect() - assert event_hub.connected is True - - -def test_reconnect_when_not_connected(session): - '''Reconnect successfully even if not already connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.reconnect() - assert event_hub.connected is True - - event_hub.disconnect() - - -def test_fail_to_reconnect(session, mocker): - '''Fail to reconnect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - assert event_hub.connected is True - - def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') - - 
mocker.patch('websocket.create_connection', force_fail) - - attempts = 2 - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.reconnect(attempts=attempts, delay=0.5) - - assert 'Failed to reconnect to event server' in str(error) - assert 'after {} attempts'.format(attempts) in str(error) - - -def test_wait(event_hub): - '''Wait for event and handle as they arrive.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - - # Until wait, the event should not have been processed even if received. - time.sleep(1) - assert called == {'callback': False} - - event_hub.wait(2) - assert called == {'callback': True} - - -def test_wait_interrupted_by_disconnect(event_hub): - '''Interrupt wait loop with disconnect event.''' - wait_time = 5 - start = time.time() - - # Inject event directly for test purposes. - event = Event(topic='ftrack.meta.disconnected') - event_hub._event_queue.put(event) - - event_hub.wait(wait_time) - - assert time.time() - start < wait_time - - -@pytest.mark.parametrize('identifier, registered', [ - ('registered-test-subscriber', True), - ('unregistered-test-subscriber', False) -], ids=[ - 'registered', - 'missing' -]) -def test_get_subscriber_by_identifier(event_hub, identifier, registered): - '''Return subscriber by identifier.''' - def callback(event): - pass - - subscriber = { - 'id': 'registered-test-subscriber' - } - - event_hub.subscribe('topic=test-subscribe', callback, subscriber) - retrieved = event_hub.get_subscriber_by_identifier(identifier) - - if registered: - assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) - assert retrieved.metadata.get('id') == subscriber['id'] - else: - assert retrieved is None - - -def test_subscribe(event_hub): - '''Subscribe to topics.''' - called = {'a': False, 'b': False} - - def callback_a(event): - called['a'] = True 
- - def callback_b(event): - called['b'] = True - - event_hub.subscribe('topic=test-subscribe', callback_a) - event_hub.subscribe('topic=test-subscribe-other', callback_b) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'a': True, 'b': False} - - -def test_subscribe_before_connected(session): - '''Subscribe to topic before connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - called = {'callback': False} - - def callback(event): - called['callback'] = True - - identifier = 'test-subscriber' - event_hub.subscribe( - 'topic=test-subscribe', callback, subscriber={'id': identifier} - ) - assert event_hub.get_subscriber_by_identifier(identifier) is not None - - event_hub.connect() - - try: - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - finally: - event_hub.disconnect() - - assert called == {'callback': True} - - -def test_duplicate_subscriber(event_hub): - '''Fail to subscribe same subscriber more than once.''' - subscriber = {'id': 'test-subscriber'} - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - with pytest.raises(ftrack_api.exception.NotUniqueError) as error: - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - assert '{0} already exists'.format(subscriber['id']) in str(error) - - -def test_unsubscribe(event_hub): - '''Unsubscribe a specific callback.''' - def callback_a(event): - pass - - def callback_b(event): - pass - - identifier_a = event_hub.subscribe('topic=test', callback_a) - identifier_b = event_hub.subscribe('topic=test', callback_b) - - assert_callbacks(event_hub, [callback_a, callback_b]) - - event_hub.unsubscribe(identifier_a) - - # Unsubscribe requires confirmation event so wait here to give event a - # chance to process. 
- time.sleep(5) - - assert_callbacks(event_hub, [callback_b]) - - -def test_unsubscribe_whilst_disconnected(event_hub): - '''Unsubscribe whilst disconnected.''' - identifier = event_hub.subscribe('topic=test', None) - event_hub.disconnect(unsubscribe=False) - - event_hub.unsubscribe(identifier) - assert_callbacks(event_hub, []) - - -def test_unsubscribe_missing_subscriber(event_hub): - '''Fail to unsubscribe a non-subscribed subscriber.''' - identifier = 'non-subscribed-subscriber' - with pytest.raises(ftrack_api.exception.NotFoundError) as error: - event_hub.unsubscribe(identifier) - - assert ( - 'missing subscriber with identifier {}'.format(identifier) - in str(error) - ) - - -@pytest.mark.parametrize('event_data', [ - dict(source=dict(id='1', user=dict(username='auto'))), - dict(source=dict(user=dict(username='auto'))), - dict(source=dict(id='1')), - dict() -], ids=[ - 'pre-prepared', - 'missing id', - 'missing user', - 'no source' -]) -def test_prepare_event(session, event_data): - '''Prepare event.''' - # Replace username `auto` in event data with API user. 
- try: - if event_data['source']['user']['username'] == 'auto': - event_data['source']['user']['username'] = session.api_user - except KeyError: - pass - - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.id = '1' - - event = Event('test', id='event-id', **event_data) - expected = Event( - 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) - ) - event_hub._prepare_event(event) - assert event == expected - - -def test_prepare_reply_event(session): - '''Prepare reply event.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - source_event = Event('source', source=dict(id='source-id')) - reply_event = Event('reply') - - event_hub._prepare_reply_event(reply_event, source_event) - assert source_event['source']['id'] in reply_event['target'] - assert reply_event['in_reply_to_event'] == source_event['id'] - - event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) - assert reply_event['source'] == {'id': 'source'} - - -def test_publish(event_hub): - '''Publish asynchronous event.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'callback': True} - - -def test_publish_raising_error(event_hub): - '''Raise error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. 
- event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - - with pytest.raises(Exception): - event_hub.publish(event, on_error='raise') - - -def test_publish_ignoring_error(event_hub): - '''Ignore error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. - event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event, on_error='ignore') - - -def test_publish_logs_other_errors(event_hub, caplog, mocker): - '''Log publish errors other than connection error.''' - # Mock connection to force error. - mocker.patch.object(event_hub, '_connection', MockConnection()) - - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event) - - expected = 'Error sending event {0}.'.format(event) - messages = [record.getMessage().strip() for record in caplog.records()] - assert expected in messages, 'Expected log message missing in output.' 
- - -def test_synchronous_publish(event_hub): - '''Publish event synchronously and collect results.''' - def callback_a(event): - return 'A' - - def callback_b(event): - return 'B' - - def callback_c(event): - return 'C' - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - results = event_hub.publish(Event(topic='test'), synchronous=True) - assert results == ['A', 'B', 'C'] - - -def test_publish_with_reply(event_hub): - '''Publish asynchronous event with on reply handler.''' - - def replier(event): - '''Replier.''' - return 'Replied' - - event_hub.subscribe('topic=test', replier) - - called = {'callback': None} - - def on_reply(event): - called['callback'] = event['data'] - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert called['callback'] == 'Replied' - - -def test_publish_with_multiple_replies(event_hub): - '''Publish asynchronous event and retrieve multiple replies.''' - - def replier_one(event): - '''Replier.''' - return 'One' - - def replier_two(event): - '''Replier.''' - return 'Two' - - event_hub.subscribe('topic=test', replier_one) - event_hub.subscribe('topic=test', replier_two) - - called = {'callback': []} - - def on_reply(event): - called['callback'].append(event['data']) - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert sorted(called['callback']) == ['One', 'Two'] - - -@pytest.mark.slow -def test_server_heartbeat_response(): - '''Maintain connection by responding to server heartbeat request.''' - test_script = os.path.join( - os.path.dirname(__file__), 'event_hub_server_heartbeat.py' - ) - - # Start subscriber that will listen for all three messages. - subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe']) - - # Give subscriber time to connect to server. 
- time.sleep(10) - - # Start publisher to publish three messages. - publisher = subprocess.Popen([sys.executable, test_script, 'publish']) - - publisher.wait() - subscriber.wait() - - assert subscriber.returncode == 0 - - -def test_stop_event(event_hub): - '''Stop processing of subsequent local handlers when stop flag set.''' - called = { - 'a': False, - 'b': False, - 'c': False - } - - def callback_a(event): - called['a'] = True - - def callback_b(event): - called['b'] = True - event.stop() - - def callback_c(event): - called['c'] = True - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - event_hub.publish(Event(topic='test')) - event_hub.wait(2) - - assert called == { - 'a': True, - 'b': True, - 'c': False - } - - -def test_encode(session): - '''Encode event data.''' - encoded = session.event_hub._encode( - dict(name='ftrack.event', args=[Event('test')]) - ) - assert 'inReplyToEvent' in encoded - assert 'in_reply_to_event' not in encoded - - -def test_decode(session): - '''Decode event data.''' - decoded = session.event_hub._decode( - json.dumps({ - 'inReplyToEvent': 'id' - }) - ) - - assert 'in_reply_to_event' in decoded - assert 'inReplyToEvent' not in decoded diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py deleted file mode 100644 index dc8ac69fd92..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation.''' - subscriber = 
ftrack_api.event.subscriber.Subscriber( - 'topic=test', lambda x: None, {'meta': 'info'}, 100 - ) - - assert str(subscriber) == ( - '' - ) - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'interested', - 'not interested' -]) -def test_interested_in(expression, event, expected): - '''Determine if subscriber interested in event.''' - subscriber = ftrack_api.event.subscriber.Subscriber( - expression, lambda x: None, {'meta': 'info'}, 100 - ) - assert subscriber.interested_in(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py deleted file mode 100644 index 1535309f257..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscription -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation is subscription expression.''' - expression = 'topic=some-topic' - subscription = ftrack_api.event.subscription.Subscription(expression) - - assert str(subscription) == expression - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'match', - 'no match' -]) -def test_includes(expression, event, expected): - '''Subscription includes event.''' - subscription = ftrack_api.event.subscription.Subscription(expression) - assert subscription.includes(event) is expected diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py deleted file mode 100644 index bc98f15de24..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py deleted file mode 100644 index 51c896f96ba..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.resource_identifier_transformer.base as _transformer - - -@pytest.fixture() -def transformer(session): - '''Return instance of ResourceIdentifierTransformer.''' - return _transformer.ResourceIdentifierTransformer(session) - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_encode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.encode(resource_identifier, context) == expected - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_decode(transformer, 
resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.decode(resource_identifier, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py deleted file mode 100644 index bc98f15de24..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py deleted file mode 100644 index dbf91ead208..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py +++ /dev/null @@ -1,31 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.structure.base - - -class Concrete(ftrack_api.structure.base.Structure): - '''Concrete implementation to allow testing non-abstract methods.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. 
- - ''' - return 'resource_identifier' - - -@pytest.mark.parametrize('sequence, expected', [ - ({'padding': None}, '%d'), - ({'padding': 4}, '%04d') -], ids=[ - 'no padding', - 'padded' -]) -def test_get_sequence_expression(sequence, expected): - '''Get sequence expression from sequence.''' - structure = Concrete() - assert structure._get_sequence_expression(sequence) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py deleted file mode 100644 index 01ccb35ac85..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api -import ftrack_api.structure.entity_id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.entity_id.EntityIdStructure() - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. 
-# https://github.com/pytest-dev/pytest/issues/579 - -def valid_entity(): - '''Return valid entity.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': 'file_component', - 'file_type': '.png' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - (valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724'), - (mock.Mock(), {}, Exception) -], ids=[ - 'valid-entity', - 'non-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py deleted file mode 100644 index ef81da2d65d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - -import ftrack_api -import ftrack_api.structure.id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.id.IdStructure(prefix='path') - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. 
-# https://github.com/pytest-dev/pytest/issues/579 - -def file_component(container=None): - '''Return file component.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': '0001', - 'file_type': '.png', - 'container': container - }) - - return entity - - -def sequence_component(padding=0): - '''Return sequence component with *padding*.''' - session = ftrack_api.Session() - - entity = session.create('SequenceComponent', { - 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', - 'name': 'sequence_component', - 'file_type': '.png', - 'padding': padding - }) - - return entity - - -def container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', - 'name': 'container_component' - }) - - return entity - - -def unsupported_entity(): - '''Return an unsupported entity.''' - session = ftrack_api.Session() - - entity = session.create('User', { - 'username': 'martin' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - ( - file_component(), {}, - 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(container_component()), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' - 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(sequence_component()), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png' - ), - ( - sequence_component(padding=0), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png' - ), - ( - sequence_component(padding=4), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png' - ), - ( - container_component(), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a' - ), - (unsupported_entity(), {}, NotImplementedError) -], ids=[ - 'file-component', - 'file-component-in-container', - 'file-component-in-sequence', - 'unpadded-sequence-component', 
- 'padded-sequence-component', - 'container-component', - 'unsupported-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py deleted file mode 100644 index e294e04a70a..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api.structure.origin - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.origin.OriginStructure() - - -@pytest.mark.parametrize('entity, context, expected', [ - (mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier'), - (mock.Mock(), {}, ValueError), - (mock.Mock(), None, ValueError) -], ids=[ - 'valid-context', - 'invalid-context', - 'unspecified-context' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py 
deleted file mode 100644 index dd72f8ec3fa..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py +++ /dev/null @@ -1,309 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api -import ftrack_api.structure.standard - - -@pytest.fixture(scope='session') -def new_project(request): - '''Return new empty project.''' - session = ftrack_api.Session() - - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -def new_container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'name': 'container_component' - }) - - return entity - - -def new_sequence_component(): - '''Return sequence component.''' - session = ftrack_api.Session() - - entity = session.create_component( - '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'} - ) - - return entity - - -def new_file_component(name='foo', container=None): - '''Return file component with *name* and *container*.''' - if container: - session = container.session - else: - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'name': name, - 'file_type': '.png', - 'container': container - }) - - return entity - - -# Reusable fixtures. -file_component = new_file_component() -container_component = new_container_component() -sequence_component = new_sequence_component() - - -# Note: to improve test performance the same project is reused throughout the -# tests. 
This means that all hierarchical names must be unique, otherwise an -# IntegrityError will be raised on the server. - -@pytest.mark.parametrize( - 'component, hierarchy, expected, structure, asset_name', - [ - ( - file_component, - [], - '{project_name}/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/foobar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - project_versions_prefix='foobar' - ), - 'my_new_asset' - ), - ( - file_component, - ['baz1', 'bar'], - '{project_name}/baz1/bar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component, - ['baz2', 'bar'], - '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component['members'][3], - ['baz3', 'bar'], - '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - container_component, - ['baz4', 'bar'], - '{project_name}/baz4/bar/my_new_asset/v001/container_component', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(container=container_component), - ['baz5', 'bar'], - ( - '{project_name}/baz5/bar/my_new_asset/v001/container_component/' - 'foo.png' - ), - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn'], - '{project_name}/bjorn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn!'], - '{project_name}/bjorn_/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(name=u'fää'), - [], - '{project_name}/my_new_asset/v001/faa.png', - ftrack_api.structure.standard.StandardStructure(), - 
'my_new_asset' - ), - ( - new_file_component(name=u'fo/o'), - [], - '{project_name}/my_new_asset/v001/fo_o.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/aao/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'åäö' - ), - ( - file_component, - [], - '{project_name}/my_ne____w_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'my_ne!!!!w_asset' - ), - ( - file_component, - [u'björn2'], - u'{project_name}/björn2/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute=None - ), - 'my_new_asset' - ), - ( - file_component, - [u'bj!rn'], - '{project_name}/bj^rn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute='^' - ), - 'my_new_asset' - ) - ], ids=[ - 'file_component_on_project', - 'file_component_on_project_with_prefix', - 'file_component_with_hierarchy', - 'sequence_component', - 'sequence_component_member', - 'container_component', - 'container_component_member', - 'slugify_non_ascii_hierarchy', - 'slugify_illegal_hierarchy', - 'slugify_non_ascii_component_name', - 'slugify_illegal_component_name', - 'slugify_non_ascii_asset_name', - 'slugify_illegal_asset_name', - 'slugify_none', - 'slugify_other_character' - ] -) -def test_get_resource_identifier( - component, hierarchy, expected, structure, asset_name, new_project -): - '''Get resource identifier.''' - session = component.session - - # Create structure, asset and version. - context_id = new_project['id'] - for name in hierarchy: - context_id = session.create('Folder', { - 'name': name, - 'project_id': new_project['id'], - 'parent_id': context_id - })['id'] - - asset = session.create( - 'Asset', {'name': asset_name, 'context_id': context_id} - ) - version = session.create('AssetVersion', {'asset': asset}) - - # Update component with version. 
- if component['container']: - component['container']['version'] = version - else: - component['version'] = version - - session.commit() - - assert structure.get_resource_identifier(component) == expected.format( - project_name=new_project['name'] - ) - - -def test_unsupported_entity(user): - '''Fail to get resource identifier for unsupported entity.''' - structure = ftrack_api.structure.standard.StandardStructure() - with pytest.raises(NotImplementedError): - structure.get_resource_identifier(user) - - -def test_component_without_version_relation(new_project): - '''Get an identifer for component without a version relation.''' - session = new_project.session - - asset = session.create( - 'Asset', {'name': 'foo', 'context_id': new_project['id']} - ) - version = session.create('AssetVersion', {'asset': asset}) - - session.commit() - - file_component = new_file_component() - file_component['version_id'] = version['id'] - - structure = ftrack_api.structure.standard.StandardStructure() - structure.get_resource_identifier(file_component) - - -def test_component_without_committed_version_relation(): - '''Fail to get an identifer for component without a committed version.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_component_without_committed_asset_relation(): - '''Fail to get an identifer for component without a committed asset.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - session.commit() - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py deleted file mode 100644 index 555adb2d891..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py +++ /dev/null @@ -1,146 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.attribute -import ftrack_api.exception - - -@pytest.mark.parametrize('attributes', [ - [], - [ftrack_api.attribute.Attribute('test')] -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_initialise_attributes_collection(attributes): - '''Initialise attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(list(attribute_collection)) == sorted(attributes) - - -def test_add_attribute_to_attributes_collection(): - '''Add valid attribute to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - assert attribute_collection.keys() == [] - attribute_collection.add(attribute) - assert attribute_collection.keys() == ['test'] - - -def test_add_duplicate_attribute_to_attributes_collection(): - '''Fail to add attribute with duplicate name to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - 
attribute = ftrack_api.attribute.Attribute('test') - - attribute_collection.add(attribute) - with pytest.raises(ftrack_api.exception.NotUniqueError): - attribute_collection.add(attribute) - - -def test_remove_attribute_from_attributes_collection(): - '''Remove attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - attribute_collection.add(attribute) - assert len(attribute_collection) == 1 - - attribute_collection.remove(attribute) - assert len(attribute_collection) == 0 - - -def test_remove_missing_attribute_from_attributes_collection(): - '''Fail to remove attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - with pytest.raises(KeyError): - attribute_collection.remove(attribute) - - -def test_get_attribute_from_attributes_collection(): - '''Get attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - attribute_collection.add(attribute) - - retrieved_attribute = attribute_collection.get('test') - - assert retrieved_attribute is attribute - - -def test_get_missing_attribute_from_attributes_collection(): - '''Get attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - assert attribute_collection.get('test') is None - - -@pytest.mark.parametrize('attributes, expected', [ - ([], []), - ([ftrack_api.attribute.Attribute('test')], ['test']) -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_attribute_collection_keys(attributes, expected): - '''Retrieve keys for attribute collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(attribute_collection.keys()) == sorted(expected) - - -@pytest.mark.parametrize('attribute, expected', [ - (None, 
False), - (ftrack_api.attribute.Attribute('b'), True), - (ftrack_api.attribute.Attribute('c'), False) -], ids=[ - 'none attribute', - 'present attribute', - 'missing attribute' -]) -def test_attributes_collection_contains(attribute, expected): - '''Check presence in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes([ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ]) - - assert (attribute in attribute_collection) is expected - - -@pytest.mark.parametrize('attributes, expected', [ - ([], 0), - ([ftrack_api.attribute.Attribute('test')], 1), - ( - [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ], - 2 - ) -], ids=[ - 'no attributes', - 'single attribute', - 'multiple attributes' -]) -def test_attributes_collection_count(attributes, expected): - '''Count attributes in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert len(attribute_collection) == expected - - -def test_iterate_over_attributes_collection(): - '''Iterate over attributes collection.''' - attributes = [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ] - - attribute_collection = ftrack_api.attribute.Attributes(attributes) - for attribute in attribute_collection: - attributes.remove(attribute) - - assert len(attributes) == 0 - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py deleted file mode 100644 index 7915737253f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py +++ /dev/null @@ -1,416 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import uuid -import tempfile - -import pytest - -import ftrack_api.cache - - -@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 
'serialised']) -def cache(request): - '''Return cache.''' - if request.param == 'proxy': - cache = ftrack_api.cache.ProxyCache( - ftrack_api.cache.MemoryCache() - ) - - elif request.param == 'layered': - cache = ftrack_api.cache.LayeredCache( - [ftrack_api.cache.MemoryCache()] - ) - - elif request.param == 'memory': - cache = ftrack_api.cache.MemoryCache() - - elif request.param == 'file': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = ftrack_api.cache.FileCache(cache_path) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - elif request.param == 'serialised': - cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.MemoryCache(), - encode=lambda value: value, - decode=lambda value: value - ) - - else: - raise ValueError( - 'Unrecognised cache fixture type {0!r}'.format(request.param) - ) - - return cache - - - -class Class(object): - '''Class for testing.''' - - def method(self, key): - '''Method for testing.''' - - -def function(mutable, x, y=2): - '''Function for testing.''' - mutable['called'] = True - return {'result': x + y} - - -def assert_memoised_call( - memoiser, function, expected, args=None, kw=None, memoised=True -): - '''Assert *function* call via *memoiser* was *memoised*.''' - mapping = {'called': False} - if args is not None: - args = (mapping,) + args - else: - args = (mapping,) - - result = memoiser.call(function, args, kw) - - assert result == expected - assert mapping['called'] is not memoised - - -def test_get(cache): - '''Retrieve item from cache.''' - cache.set('key', 'value') - assert cache.get('key') == 'value' - - -def test_get_missing_key(cache): - '''Fail to retrieve missing item from cache.''' - with pytest.raises(KeyError): - cache.get('key') - - -def test_set(cache): - '''Set item in 
cache.''' - with pytest.raises(KeyError): - cache.get('key') - - cache.set('key', 'value') - assert cache.get('key') == 'value' - - -def test_remove(cache): - '''Remove item from cache.''' - cache.set('key', 'value') - cache.remove('key') - - with pytest.raises(KeyError): - cache.get('key') - - -def test_remove_missing_key(cache): - '''Fail to remove missing key.''' - with pytest.raises(KeyError): - cache.remove('key') - - -def test_keys(cache): - '''Retrieve keys of items in cache.''' - assert cache.keys() == [] - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - assert sorted(cache.keys()) == sorted(['a', 'b', 'c']) - - -def test_clear(cache): - '''Remove items from cache.''' - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - - assert cache.keys() - cache.clear() - - assert not cache.keys() - - -def test_clear_using_pattern(cache): - '''Remove items that match pattern from cache.''' - cache.set('matching_key', 'value') - cache.set('another_matching_key', 'value') - cache.set('key_not_matching', 'value') - - assert cache.keys() - cache.clear(pattern='.*matching_key$') - - assert cache.keys() == ['key_not_matching'] - - -def test_clear_encountering_missing_key(cache, mocker): - '''Clear missing key.''' - # Force reporting keys that are not actually valid for test purposes. - mocker.patch.object(cache, 'keys', lambda: ['missing']) - assert cache.keys() == ['missing'] - - # Should not error even though key not valid. - cache.clear() - - # The key was not successfully removed so should still be present. - assert cache.keys() == ['missing'] - - -def test_layered_cache_propagates_value_on_get(): - '''Layered cache propagates value on get.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. 
- caches[1].set('key', 'value') - - # Retrieving key via layered cache should propagate it automatically to - # higher level caches only. - assert cache.get('key') == 'value' - assert caches[0].get('key') == 'value' - - with pytest.raises(KeyError): - caches[2].get('key') - - -def test_layered_cache_remove_at_depth(): - '''Remove key that only exists at depth in LayeredCache.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. - caches[1].set('key', 'value') - - # Removing key that only exists at depth should not raise key error. - cache.remove('key') - - # Ensure key was removed. - assert not cache.keys() - - -def test_expand_references(): - '''Test that references are expanded from serialized cache.''' - - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - def make_cache(session, cache_path): - '''Create a serialised file cache.''' - serialized_file_cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - return serialized_file_cache - - # Populate the serialized file cache. - session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - expanded_results = dict() - - query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10' - - for sequence in session.query(query_string): - asset = sequence.get('asset') - - expanded_results.setdefault( - asset.get('id'), asset.get('parent') - ) - - # Fetch the data from cache. - new_session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - new_session_two = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - # Make sure references are merged. 
- for sequence in new_session.query(query_string): - asset = sequence.get('asset') - - assert ( - asset.get('parent') == expanded_results[asset.get('id')] - ) - - # Use for fetching directly using get. - assert ( - new_session_two.get(asset.entity_type, asset.get('id')).get('parent') == - expanded_results[asset.get('id')] - ) - - - -@pytest.mark.parametrize('items, key', [ - (({},), '{}'), - (({}, {}), '{}{}') -], ids=[ - 'single object', - 'multiple objects' -]) -def test_string_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.StringKeyMaker() - assert key_maker.key(*items) == key - - -@pytest.mark.parametrize('items, key', [ - ( - ({},), - '\x01\x01' - ), - ( - ({'a': 'b'}, [1, 2]), - '\x01' - '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.' - '\x01' - '\x00' - '\x03' - '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.' - '\x03' - ), - ( - (function,), - '\x04function\x00unit.test_cache' - ), - ( - (Class,), - '\x04Class\x00unit.test_cache' - ), - ( - (Class.method,), - '\x04method\x00Class\x00unit.test_cache' - ), - ( - (callable,), - '\x04callable' - ) -], ids=[ - 'single mapping', - 'multiple objects', - 'function', - 'class', - 'method', - 'builtin' -]) -def test_object_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.ObjectKeyMaker() - assert key_maker.key(*items) == key - - -def test_memoised_call(): - '''Call memoised function.''' - memoiser = ftrack_api.cache.Memoiser() - - # Initial call should not be memoised so function is executed. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=False - ) - - # Identical call should be memoised so function is not executed again. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=True - ) - - # Differing call is not memoised so function is executed. 
- assert_memoised_call( - memoiser, function, args=(3,), expected={'result': 5}, memoised=False - ) - - -def test_memoised_call_variations(): - '''Call memoised function with identical arguments using variable format.''' - memoiser = ftrack_api.cache.Memoiser() - expected = {'result': 3} - - # Call function once to ensure is memoised. - assert_memoised_call( - memoiser, function, args=(1,), expected=expected, memoised=False - ) - - # Each of the following calls should equate to the same key and make - # use of the memoised value. - for args, kw in [ - ((), {'x': 1}), - ((), {'x': 1, 'y': 2}), - ((1,), {'y': 2}), - ((1,), {}) - ]: - assert_memoised_call( - memoiser, function, args=args, kw=kw, expected=expected - ) - - # The following calls should all be treated as new variations and so - # not use any memoised value. - assert_memoised_call( - memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False - ) - assert_memoised_call( - memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(5, ), expected={'result': 7}, memoised=False - ) - - -def test_memoised_mutable_return_value(): - '''Avoid side effects for returned mutable arguments when memoising.''' - memoiser = ftrack_api.cache.Memoiser() - arguments = ({'called': False}, 1) - - result_a = memoiser.call(function, arguments) - assert result_a == {'result': 3} - assert arguments[0]['called'] - - # Modify mutable externally and check that stored memoised value is - # unchanged. 
- del result_a['result'] - - arguments[0]['called'] = False - result_b = memoiser.call(function, arguments) - - assert result_b == {'result': 3} - assert not arguments[0]['called'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py deleted file mode 100644 index 15c3e5cf395..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py +++ /dev/null @@ -1,574 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy -import uuid - -import mock -import pytest - -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation - - -def create_mock_entity(session): - '''Return new mock entity for *session*.''' - entity = mock.MagicMock() - entity.session = session - entity.primary_key_attributes = ['id'] - entity['id'] = str(uuid.uuid4()) - return entity - - -@pytest.fixture -def mock_entity(session): - '''Return mock entity.''' - return create_mock_entity(session) - - -@pytest.fixture -def mock_entities(session): - '''Return list of two mock entities.''' - return [ - create_mock_entity(session), - create_mock_entity(session) - ] - - -@pytest.fixture -def mock_attribute(): - '''Return mock attribute.''' - attribute = mock.MagicMock() - attribute.name = 'test' - return attribute - - -def test_collection_initialisation_does_not_modify_entity_state( - mock_entity, mock_attribute, mock_entities -): - '''Initialising collection does not modify entity state.''' - ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET - - -def test_immutable_collection_initialisation( - mock_entity, mock_attribute, mock_entities -): - '''Initialise immutable 
collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - assert list(collection) == mock_entities - assert collection.mutable is False - - -def test_collection_shallow_copy( - mock_entity, mock_attribute, mock_entities, session -): - '''Shallow copying collection should avoid indirect mutation.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with mock_entity.session.operation_recording(False): - collection_copy = copy.copy(collection) - new_entity = create_mock_entity(session) - collection_copy.append(new_entity) - - assert list(collection) == mock_entities - assert list(collection_copy) == mock_entities + [new_entity] - - -def test_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Insert a value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection.insert(0, new_entity) - assert list(collection) == [new_entity] + mock_entities - - -def test_collection_insert_duplicate( - mock_entity, mock_attribute, mock_entities -): - '''Fail to insert a duplicate value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection.insert(0, mock_entities[1]) - - -def test_immutable_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Fail to insert a value into immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection.insert(0, create_mock_entity(session)) - - -def test_collection_set_item( - mock_entity, mock_attribute, mock_entities, 
session -): - '''Set item at index in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection[0] = new_entity - assert list(collection) == [new_entity, mock_entities[1]] - - -def test_collection_re_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Re-set value at exact same index in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - collection[0] = mock_entities[0] - assert list(collection) == mock_entities - - -def test_collection_set_duplicate_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set a duplicate value into collection at different index.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection[0] = mock_entities[1] - - -def test_immutable_collection_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set item at index in immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection[0] = mock_entities[0] - - -def test_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Remove item at index from collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - del collection[0] - assert list(collection) == [mock_entities[1]] - - -def test_collection_delete_item_at_invalid_index( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at missing index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(IndexError): - 
del collection[4] - - -def test_immutable_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - del collection[0] - - -def test_collection_count( - mock_entity, mock_attribute, mock_entities, session -): - '''Count items in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(collection) == 2 - - collection.append(create_mock_entity(session)) - assert len(collection) == 3 - - del collection[0] - assert len(collection) == 2 - - -@pytest.mark.parametrize('other, expected', [ - ([], False), - ([1, 2], True), - ([1, 2, 3], False), - ([1], False) -], ids=[ - 'empty', - 'same', - 'additional', - 'missing' -]) -def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): - '''Determine collection equality against another collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. - mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection_a = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - collection_b = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=other - ) - assert (collection_a == collection_b) is expected - - -def test_collection_not_equal_to_non_collection( - mocker, mock_entity, mock_attribute -): - '''Collection not equal to a non-collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. 
- mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - assert (collection != {}) is True - - -def test_collection_notify_on_modification( - mock_entity, mock_attribute, mock_entities, session -): - '''Record UpdateEntityOperation on collection modification.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(session.recorded_operations) == 0 - - collection.append(create_mock_entity(session)) - assert len(session.recorded_operations) == 1 - operation = session.recorded_operations.pop() - assert isinstance(operation, ftrack_api.operation.UpdateEntityOperation) - assert operation.new_value == collection - - -def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): - '''Shallow copying mapped collection proxy avoids indirect mutation.''' - metadata = new_project['metadata'] - - with new_project.session.operation_recording(False): - metadata_copy = copy.copy(metadata) - metadata_copy[unique_name] = True - - assert unique_name not in metadata - assert unique_name in metadata_copy - - -def test_mapped_collection_proxy_mutable_property(new_project): - '''Mapped collection mutable property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.mutable is True - assert metadata.collection.mutable is True - - metadata.mutable = False - assert metadata.collection.mutable is False - - -def test_mapped_collection_proxy_attribute_property( - new_project, mock_attribute -): - '''Mapped collection attribute property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.attribute is metadata.collection.attribute - - metadata.attribute = mock_attribute - assert metadata.collection.attribute is mock_attribute - - -def test_mapped_collection_proxy_get_item(new_project, unique_name): - '''Retrieve 
item in mapped collection proxy.''' - session = new_project.session - - # Prepare data. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Check in clean session retrieval of value. - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_set_item(new_project, unique_name): - '''Set new item in mapped collection proxy.''' - session = new_project.session - - metadata = new_project['metadata'] - assert unique_name not in metadata - - value = 'value' - metadata[unique_name] = value - assert metadata[unique_name] == value - - # Check change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_update_item(new_project, unique_name): - '''Update existing item in mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Set new value. - new_value = 'new_value' - metadata[unique_name] = new_value - - # Confirm change persisted correctly. 
- session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == new_value - - -def test_mapped_collection_proxy_delete_item(new_project, unique_name): - '''Remove existing item from mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value to remove. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Now remove value. - del new_project['metadata'][unique_name] - assert unique_name not in new_project['metadata'] - - # Confirm change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [] - assert unique_name not in retrieved['metadata'] - - -def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name): - '''Fail to remove item for missing key from mapped collection proxy.''' - metadata = new_project['metadata'] - assert unique_name not in metadata - with pytest.raises(KeyError): - del metadata[unique_name] - - -def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): - '''Iterate over keys in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. 
- new_project.session.commit() - - iterated = set() - for key in metadata: - iterated.add(key) - - assert iterated == set(['a', 'b', 'c']) - - -def test_mapped_collection_proxy_count(new_project, unique_name): - '''Count items in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. - new_project.session.commit() - - assert len(metadata) == 3 - - -def test_mapped_collection_on_create(session, unique_name, project): - '''Test that it is possible to set relational attributes on create''' - metadata = { - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - } - - task_id = session.create( - 'Task', { - 'name': unique_name, - 'parent': project, - 'metadata': metadata, - - } - ).get('id') - - session.commit() - - # Reset the session and check that we have the expected - # values. - session.reset() - - task = session.get( - 'Task', task_id - ) - - for key, value in metadata.items(): - assert value == task['metadata'][key] - - -def test_collection_refresh(new_asset_version, new_component): - '''Test collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select components from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version.get('components').append( - new_component - ) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version.get('components') == new_asset_version_two.get('components') - ) - - # Make a local change to our asset version - new_asset_version_two.get('components').pop() - - # Query the same asset version again and make sure our local changes - # are not overwritten. - - session_two.query( - query_string - ).all() - - assert len(new_asset_version_two.get('components')) == 0 - - -def test_mapped_collection_reload(new_asset_version): - '''Test mapped collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select metadata from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. - session_two.query( - query_string - ).all() - - assert ( - new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] - ) - - local_data = str(uuid.uuid4()) - - new_asset_version_two['metadata']['test'] = local_data - - # Modify our asset version again - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure our local changes - # are not overwritten. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version_two['metadata']['test'] == local_data - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py deleted file mode 100644 index 7a9b0fadaa4..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py +++ /dev/null @@ -1,251 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api - -@pytest.fixture( - params=[ - 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', - 'Asset' - ] -) -def new_entity_and_custom_attribute(request, session): - '''Return tuple with new entity, custom attribute name and value.''' - if request.param == 'AssetVersion': - entity = session.create( - request.param, { - 'asset': session.query('Asset').first() - } - ) - return (entity, 'versiontest', 123) - - elif request.param == 'Shot': - sequence = session.query('Sequence').first() - entity = session.create( - request.param, { - 'parent_id': sequence['id'], - 'project_id': sequence['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'fstart', 1005) - - elif request.param == 'Asset': - shot = session.query('Shot').first() - entity = session.create( - request.param, { - 'context_id': shot['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'htest', 1005) - - elif request.param in ('AssetVersionList', 'TypedContextList'): - entity = session.create( - request.param, { - 'project_id': session.query('Project').first()['id'], - 'category_id': session.query('ListCategory').first()['id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'listbool', True) - - elif request.param == 'User': - entity = session.create( - request.param, { - 'first_name': 'Custom attribute 
test', - 'last_name': 'Custom attribute test', - 'username': str(uuid.uuid1()) - } - ) - return (entity, 'teststring', 'foo') - - -@pytest.mark.parametrize( - 'entity_type, entity_model_name, custom_attribute_name', - [ - ('Task', 'task', 'customNumber'), - ('AssetVersion', 'assetversion', 'NumberField') - ], - ids=[ - 'task', - 'asset_version' - ] -) -def test_read_set_custom_attribute( - session, entity_type, entity_model_name, custom_attribute_name -): - '''Retrieve custom attribute value set on instance.''' - custom_attribute_value = session.query( - 'CustomAttributeValue where configuration.key is ' - '{custom_attribute_name}' - .format( - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity = session.query( - 'select custom_attributes from {entity_type} where id is ' - '{entity_id}'.format( - entity_type=entity_type, - entity_id=custom_attribute_value['entity_id'], - ) - ).first() - - assert custom_attribute_value - - assert entity['id'] == entity['custom_attributes'].collection.entity['id'] - assert entity is entity['custom_attributes'].collection.entity - assert ( - entity['custom_attributes'][custom_attribute_name] == - custom_attribute_value['value'] - ) - - assert custom_attribute_name in entity['custom_attributes'].keys() - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'customNumber'), - ('Shot', 'fstart'), - ( - 'AssetVersion', 'NumberField' - ) - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_set_custom_attribute_value( - session, entity_type, custom_attribute_name -): - '''Overwrite existing instance level custom attribute value.''' - entity = session.query( - 'select custom_attributes from {entity_type} where ' - 'custom_attributes.configuration.key is {custom_attribute_name}'.format( - entity_type=entity_type, - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity['custom_attributes'][custom_attribute_name] = 42 - - assert 
entity['custom_attributes'][custom_attribute_name] == 42 - - session.commit() - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_read_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to read value from a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to write a value to a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] = 42 - - -def test_set_custom_attribute_on_new_but_persisted_version( - session, new_asset_version -): - '''Set custom attribute on new persisted version.''' - new_asset_version['custom_attributes']['versiontest'] = 5 - session.commit() - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_batch_create_entity_and_custom_attributes( - new_entity_and_custom_attribute -): - '''Write custom attribute value and entity in the same batch.''' - entity, name, value = new_entity_and_custom_attribute - session = entity.session - entity['custom_attributes'][name] = value - - assert entity['custom_attributes'][name] == value - session.commit() - - assert entity['custom_attributes'][name] == value - - -def test_refresh_custom_attribute(new_asset_version): - '''Test custom attribute refresh.''' - session_two = ftrack_api.Session() - - query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - asset_version_two = session_two.query( - query_string - ).first() - - new_asset_version['custom_attributes']['versiontest'] = 42 - - new_asset_version.session.commit() - - asset_version_two = session_two.query( - query_string - ).first() - - assert ( - new_asset_version['custom_attributes']['versiontest'] == - asset_version_two['custom_attributes']['versiontest'] - ) - - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py deleted file mode 100644 index c53dda9630f..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py +++ /dev/null @@ -1,129 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile - -import pytest - -import ftrack_api.data - - -@pytest.fixture() -def content(): - '''Return initial content.''' - return 'test data' - - -@pytest.fixture(params=['file', 'file_wrapper', 'string']) -def data(request, content): - '''Return cache.''' - - if request.param == 'string': - data_object = ftrack_api.data.String(content) - - elif request.param == 'file': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - 
file_object.write(content) - file_object.flush() - file_object.close() - - data_object = ftrack_api.data.File(path, 'r+') - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - elif request.param == 'file_wrapper': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - file_object.write(content) - file_object.seek(0) - - data_object = ftrack_api.data.FileWrapper(file_object) - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - else: - raise ValueError('Unrecognised parameter: {0}'.format(request.param)) - - return data_object - - -def test_read(data, content): - '''Return content from current position up to *limit*.''' - assert data.read(5) == content[:5] - assert data.read() == content[5:] - - -def test_write(data, content): - '''Write content at current position.''' - assert data.read() == content - data.write('more test data') - data.seek(0) - assert data.read() == content + 'more test data' - - -def test_flush(data): - '''Flush buffers ensuring data written.''' - # TODO: Implement better test than just calling function. 
- data.flush() - - -def test_seek(data, content): - '''Move internal pointer to *position*.''' - data.seek(5) - assert data.read() == content[5:] - - -def test_tell(data): - '''Return current position of internal pointer.''' - assert data.tell() == 0 - data.seek(5) - assert data.tell() == 5 - - -def test_close(data): - '''Flush buffers and prevent further access.''' - data.close() - with pytest.raises(ValueError) as error: - data.read() - - assert 'I/O operation on closed file' in str(error.value) - - -class Dummy(ftrack_api.data.Data): - '''Dummy string.''' - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - def write(self, content): - '''Write content at current position.''' - - -def test_unsupported_tell(): - '''Fail when tell unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.tell() - - assert 'Tell not supported' in str(error.value) - - -def test_unsupported_seek(): - '''Fail when seek unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.seek(5) - - assert 'Seek not supported' in str(error.value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py deleted file mode 100644 index ae565cb3f50..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import termcolor - -import ftrack_api.formatter - - -def colored(text, *args, **kwargs): - '''Pass through so there are no escape sequences in output.''' - return text - - -def test_format(user, mocker): - '''Return formatted representation of entity.''' - mocker.patch.object(termcolor, 'colored', colored) - - result = ftrack_api.formatter.format(user) - - # Cannot test entire 
string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: jenkins' in result - assert ' email: ' in result - - -def test_format_using_custom_formatters(user): - '''Return formatted representation of entity using custom formatters.''' - result = ftrack_api.formatter.format( - user, formatters={ - 'header': lambda text: '*{0}*'.format(text), - 'label': lambda text: '-{0}'.format(text) - } - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('*User*\n') - assert ' -username: jenkins' in result - assert ' -email: ' in result - - -def test_format_filtering(new_user, mocker): - '''Return formatted representation using custom filter.''' - mocker.patch.object(termcolor, 'colored', colored) - - with new_user.session.auto_populating(False): - result = ftrack_api.formatter.format( - new_user, - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: {0}'.format(new_user['username']) in result - assert ' email: ' not in result - - -def test_format_recursive(user, mocker): - '''Return formatted recursive representation.''' - mocker.patch.object(termcolor, 'colored', colored) - - user.session.populate(user, 'timelogs.user') - - with user.session.auto_populating(False): - result = ftrack_api.formatter.format(user, recursive=True) - - # Cannot test entire string as too variable so check for key text. 
- assert result.startswith('User\n') - assert ' username: jenkins' - assert ' timelogs: Timelog' in result - assert ' user: User{...}' in result diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py deleted file mode 100644 index 57b44613a84..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py +++ /dev/null @@ -1,101 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.inspection -import ftrack_api.symbol - - -def test_identity(user): - '''Retrieve identity of *user*.''' - identity = ftrack_api.inspection.identity(user) - assert identity[0] == 'User' - assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] - - -def test_primary_key(user): - '''Retrieve primary key of *user*.''' - primary_key = ftrack_api.inspection.primary_key(user) - assert primary_key == { - 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' - } - - -def test_created_entity_state(session, unique_name): - '''Created entity has CREATED state.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - # Even after a modification the state should remain as CREATED. 
- new_user['username'] = 'changed' - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - -def test_retrieved_entity_state(user): - '''Retrieved entity has NOT_SET state.''' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET - - -def test_modified_entity_state(user): - '''Modified entity has MODIFIED state.''' - user['username'] = 'changed' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED - - -def test_deleted_entity_state(session, user): - '''Deleted entity has DELETED state.''' - session.delete(user) - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED - - -def test_post_commit_entity_state(session, unique_name): - '''Entity has NOT_SET state post commit.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - session.commit() - - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.NOT_SET - - -def test_states(session, unique_name, user): - '''Determine correct states for multiple entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - # Assert states. 
- states = ftrack_api.inspection.states([user_a, user_b, user_c, user_d]) - - assert states == [ - ftrack_api.symbol.NOT_SET, - ftrack_api.symbol.CREATED, - ftrack_api.symbol.MODIFIED, - ftrack_api.symbol.DELETED - ] - - -def test_states_for_no_entities(): - '''Return empty list of states when no entities passed.''' - states = ftrack_api.inspection.states([]) - assert states == [] - - -def test_skip_operations_for_non_inspected_entities(session, unique_name): - '''Skip operations for non inspected entities.''' - user_a = session.create('User', {'username': unique_name + '-1'}) - user_b = session.create('User', {'username': unique_name + '-2'}) - - states = ftrack_api.inspection.states([user_a]) - assert states == [ftrack_api.symbol.CREATED] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py deleted file mode 100644 index 702bfae355d..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py +++ /dev/null @@ -1,79 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.operation - - -def test_operations_initialise(): - '''Initialise empty operations stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - -def test_operations_push(): - '''Push new operation onto stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operation = ftrack_api.operation.Operation() - operations.push(operation) - assert list(operations)[-1] is operation - - -def test_operations_pop(): - '''Pop and return operation from stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operation = ftrack_api.operation.Operation() - 
operations.push(operation) - - assert len(operations) == 3 - popped = operations.pop() - assert popped is operation - assert len(operations) == 2 - - -def test_operations_count(): - '''Count operations in stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 1 - - operations.pop() - assert len(operations) == 0 - - -def test_operations_clear(): - '''Clear operations stack.''' - operations = ftrack_api.operation.Operations() - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 3 - - operations.clear() - assert len(operations) == 0 - - -def test_operations_iter(): - '''Iterate over operations stack.''' - operations = ftrack_api.operation.Operations() - operation_a = ftrack_api.operation.Operation() - operation_b = ftrack_api.operation.Operation() - operation_c = ftrack_api.operation.Operation() - - operations.push(operation_a) - operations.push(operation_b) - operations.push(operation_c) - - assert len(operations) == 3 - for operation, expected in zip( - operations, [operation_a, operation_b, operation_c] - ): - assert operation is expected - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py deleted file mode 100644 index 247b496d963..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api - - -class Class(object): - '''Class.''' - - -class Mixin(object): - '''Mixin.''' - - def method(self): - '''Method.''' - return True - - -def test_mixin(): - '''Mixin class to instance.''' - instance_a = 
Class() - instance_b = Class() - - assert not hasattr(instance_a, 'method') - assert not hasattr(instance_b, 'method') - - ftrack_api.mixin(instance_a, Mixin) - - assert hasattr(instance_a, 'method') - assert instance_a.method() is True - assert not hasattr(instance_b, 'method') - - -def test_mixin_same_class_multiple_times(): - '''Mixin class to instance multiple times.''' - instance = Class() - assert not hasattr(instance, 'method') - assert len(instance.__class__.mro()) == 2 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py deleted file mode 100644 index 252c813a9b5..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py +++ /dev/null @@ -1,192 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import textwrap -import logging -import re - -import pytest - -import ftrack_api.plugin - - -@pytest.fixture() -def valid_plugin(temporary_path): - '''Return path to directory containing a valid plugin.''' - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(*args, **kw): - print "Registered", args, kw - ''')) - - return temporary_path - - -@pytest.fixture() -def python_non_plugin(temporary_path): - '''Return path to directory containing Python file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - print "Not a plugin" - - def not_called(): - print "Not 
called" - ''')) - - return temporary_path - - -@pytest.fixture() -def non_plugin(temporary_path): - '''Return path to directory containing file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: - file_object.write('Never seen') - - return temporary_path - - -@pytest.fixture() -def broken_plugin(temporary_path): - '''Return path to directory containing broken plugin.''' - with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: - file_object.write('syntax error') - - return temporary_path - - -@pytest.fixture() -def plugin(request, temporary_path): - '''Return path containing a plugin with requested specification.''' - specification = request.param - output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) - output = re.sub('\*args', 'args', output) - output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) - - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - content = textwrap.dedent(''' - def register({}): - print {} - '''.format(specification, output)) - file_object.write(content) - - return temporary_path - - -def test_discover_empty_paths(capsys): - '''Discover no plugins when paths are empty.''' - ftrack_api.plugin.discover([' ']) - output, error = capsys.readouterr() - assert not output - assert not error - - -def test_discover_valid_plugin(valid_plugin, capsys): - '''Discover valid plugin.''' - ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) - output, error = capsys.readouterr() - assert 'Registered (1, 2) {\'3\': 4}' in output - - -def test_discover_python_non_plugin(python_non_plugin, capsys): - '''Discover Python non plugin.''' - ftrack_api.plugin.discover([python_non_plugin]) - output, error = capsys.readouterr() - assert 'Not a plugin' in output - assert 'Not called' not in output - - -def test_discover_non_plugin(non_plugin, capsys): - '''Discover non plugin.''' - ftrack_api.plugin.discover([non_plugin]) - output, error = 
capsys.readouterr() - assert not output - assert not error - - -def test_discover_broken_plugin(broken_plugin, caplog): - '''Discover broken plugin.''' - ftrack_api.plugin.discover([broken_plugin]) - - records = caplog.records() - assert len(records) == 1 - assert records[0].levelno is logging.WARNING - assert 'Failed to load plugin' in records[0].message - - -@pytest.mark.parametrize( - 'plugin, positional, keyword, expected', - [ - ( - 'a, b=False, c=False, d=False', - (1, 2), {'c': True, 'd': True, 'e': True}, - '1 b=2 c=True d=True' - ), - ( - '*args', - (1, 2), {'b': True, 'c': False}, - '(1, 2)' - ), - ( - '**kwargs', - tuple(), {'b': True, 'c': False}, - '[(\'b\', True), (\'c\', False)]' - ), - ( - 'a=False, b=False', - (True,), {'b': True}, - 'a=True b=True' - ), - ( - 'a, c=False, *args', - (1, 2, 3, 4), {}, - '1 c=2 (3, 4)' - ), - ( - 'a, c=False, **kwargs', - tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, - '1 c=3 [(\'b\', 2), (\'d\', 4)]' - ), - ], - indirect=['plugin'], - ids=[ - 'mixed-explicit', - 'variable-args-only', - 'variable-kwargs-only', - 'keyword-from-positional', - 'trailing-variable-args', - 'trailing-keyword-args' - ] -) -def test_discover_plugin_with_specific_signature( - plugin, positional, keyword, expected, capsys -): - '''Discover plugin passing only supported arguments.''' - ftrack_api.plugin.discover( - [plugin], positional, keyword - ) - output, error = capsys.readouterr() - assert expected in output - - -def test_discover_plugin_varying_signatures(temporary_path, capsys): - '''Discover multiple plugins with varying signatures.''' - with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a): - print (a,) - ''')) - - with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a, b=False): - print (a,), {'b': b} - ''')) - - ftrack_api.plugin.discover( - [temporary_path], (True,), {'b': True} - 
) - - output, error = capsys.readouterr() - assert '(True,)'in output - assert '(True,) {\'b\': True}' in output diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py deleted file mode 100644 index f8e3f9dec33..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py +++ /dev/null @@ -1,164 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import math - -import pytest - -import ftrack_api -import ftrack_api.query -import ftrack_api.exception - - -def test_index(session): - '''Index into query result.''' - results = session.query('User') - assert isinstance(results[2], session.types['User']) - - -def test_len(session): - '''Return count of results using len.''' - results = session.query('User where username is jenkins') - assert len(results) == 1 - - -def test_all(session): - '''Return all results using convenience method.''' - results = session.query('User').all() - assert isinstance(results, list) - assert len(results) - - -def test_implicit_iteration(session): - '''Implicitly iterate through query result.''' - results = session.query('User') - assert isinstance(results, ftrack_api.query.QueryResult) - - records = [] - for record in results: - records.append(record) - - assert len(records) == len(results) - - -def test_one(session): - '''Return single result using convenience method.''' - user = session.query('User where username is jenkins').one() - assert user['username'] == 'jenkins' - - -def test_one_fails_for_no_results(session): - '''Fail to fetch single result when no results available.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - session.query('User where username is does_not_exist').one() - - -def test_one_fails_for_multiple_results(session): - '''Fail to fetch single result when multiple results 
available.''' - with pytest.raises(ftrack_api.exception.MultipleResultsFoundError): - session.query('User').one() - - -def test_one_with_existing_limit(session): - '''Fail to return single result when existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').one() - - -def test_one_with_existing_offset(session): - '''Fail to return single result when existing offset in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins offset 2').one() - - -def test_one_with_prefetched_data(session): - '''Return single result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.one() - assert user['username'] == 'jenkins' - - -def test_first(session): - '''Return first result using convenience method.''' - users = session.query('User').all() - - user = session.query('User').first() - assert user == users[0] - - -def test_first_returns_none_when_no_results(session): - '''Return None when no results available.''' - user = session.query('User where username is does_not_exist').first() - assert user is None - - -def test_first_with_existing_limit(session): - '''Fail to return first result when existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').first() - - -def test_first_with_existing_offset(session): - '''Return first result whilst respecting custom offset.''' - users = session.query('User').all() - - user = session.query('User offset 2').first() - assert user == users[2] - - -def test_first_with_prefetched_data(session): - '''Return first result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.first() - assert user['username'] == 'jenkins' - - -def test_paging(session, mocker): - '''Page through results.''' - mocker.patch.object(session, 'call', wraps=session.call) - - 
page_size = 5 - query = session.query('User limit 50', page_size=page_size) - records = query.all() - - assert session.call.call_count == ( - math.ceil(len(records) / float(page_size)) - ) - - -def test_paging_respects_offset_and_limit(session, mocker): - '''Page through results respecting offset and limit.''' - users = session.query('User').all() - - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 6 - query = session.query('User offset 2 limit 8', page_size=page_size) - records = query.all() - - assert session.call.call_count == 2 - assert len(records) == 8 - assert records == users[2:10] - - -def test_paging_respects_limit_smaller_than_page_size(session, mocker): - '''Use initial limit when less than page size.''' - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 100 - query = session.query('User limit 10', page_size=page_size) - records = query.all() - - assert session.call.call_count == 1 - session.call.assert_called_once_with( - [{ - 'action': 'query', - 'expression': 'select id from User offset 0 limit 10' - }] - ) - - assert len(records) == 10 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py deleted file mode 100644 index 5087efcc088..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py +++ /dev/null @@ -1,1519 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile -import functools -import uuid -import textwrap -import datetime -import json -import random - -import pytest -import mock -import arrow -import requests - -import ftrack_api -import ftrack_api.cache -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.session -import ftrack_api.collection - - 
-@pytest.fixture(params=['memory', 'persisted']) -def cache(request): - '''Return cache.''' - if request.param == 'memory': - cache = None # There is already a default Memory cache present. - elif request.param == 'persisted': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = lambda session: ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - return cache - - -@pytest.fixture() -def temporary_invalid_schema_cache(request): - '''Return schema cache path to invalid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - file_.write('${invalid json}') - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -@pytest.fixture() -def temporary_valid_schema_cache(request, mocked_schemas): - '''Return schema cache path to valid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - json.dump(mocked_schemas, file_, indent=4) - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that should not cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, 
ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - - -def test_get_entity(session, user): - '''Retrieve an entity by type and id.''' - matching = session.get(*ftrack_api.inspection.identity(user)) - assert matching == user - - -def test_get_non_existant_entity(session): - '''Retrieve a non-existant entity by type and id.''' - matching = session.get('User', 'non-existant-id') - assert matching is None - - -def test_get_entity_of_invalid_type(session): - '''Fail to retrieve an entity using an invalid type.''' - with pytest.raises(KeyError): - session.get('InvalidType', 'id') - - -def test_create(session): - '''Create entity.''' - user = session.create('User', {'username': 'martin'}) - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] == 'martin' - assert user['email'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_only_defaults(session): - '''Create entity using defaults only.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_server_side_defaults(session): - '''Create entity using server side defaults.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - session.commit() - assert user['username'] is not ftrack_api.symbol.NOT_SET - - -def test_create_overriding_defaults(session): - '''Create entity overriding defaults.''' - uid = str(uuid.uuid4()) - user = session.create('User', {'id': uid}) - with session.auto_populating(False): - assert user['id'] == uid - - -def test_create_with_reference(session): - '''Create entity with a reference to another.''' - status = session.query('Status')[0] - 
task = session.create('Task', {'status': status}) - assert task['status'] is status - - -def test_ensure_new_entity(session, unique_name): - '''Ensure entity, creating first.''' - entity = session.ensure('User', {'username': unique_name}) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_non_string_data_types(session): - '''Ensure entity against non-string data types, creating first.''' - datetime = arrow.get() - - task = session.query('Task').first() - user = session.query( - 'User where username is {}'.format(session.api_user) - ).first() - - first = session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - - with mock.patch.object(session, 'create') as mocked: - session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - assert not mocked.called - - assert first['start'] == datetime - assert first['duration'] == 10 - - -def test_ensure_entity_with_identifying_keys(session, unique_name): - '''Ensure entity, checking using keys subset and then creating.''' - entity = session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['username'] - ) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): - '''Fail to ensure entity when identifying key missing from data.''' - with pytest.raises(KeyError): - session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['invalid'] - ) - - -def test_ensure_entity_with_missing_identifying_keys(session): - '''Fail to ensure entity when no identifying keys determined.''' - with pytest.raises(ValueError): - session.ensure('User', {}) - - -def test_ensure_existing_entity(session, unique_name): - '''Ensure existing entity.''' - entity = session.ensure('User', {'first_name': unique_name}) - - # Second call should 
not commit any new entity, just retrieve the existing. - with mock.patch.object(session, 'create') as mocked: - retrieved = session.ensure('User', {'first_name': unique_name}) - assert not mocked.called - assert retrieved == entity - - -def test_ensure_update_existing_entity(session, unique_name): - '''Ensure and update existing entity.''' - entity = session.ensure( - 'User', {'first_name': unique_name, 'email': 'anon@example.com'} - ) - assert entity['email'] == 'anon@example.com' - - # Second call should commit updates. - retrieved = session.ensure( - 'User', {'first_name': unique_name, 'email': 'test@example.com'}, - identifying_keys=['first_name'] - ) - assert retrieved == entity - assert retrieved['email'] == 'test@example.com' - - -def test_reconstruct_entity(session): - '''Reconstruct entity.''' - uid = str(uuid.uuid4()) - data = { - 'id': uid, - 'username': 'martin', - 'email': 'martin@example.com' - } - user = session.create('User', data, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. - assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # Only remote attributes that had explicit values should be set. - value = attribute.get_remote_value(user) - if attribute.name in data: - assert value == data[attribute.name] - else: - assert value is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_entity_does_not_apply_defaults(session): - '''Reconstruct entity does not apply defaults.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - with session.auto_populating(False): - assert user['id'] is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_empty_entity(session): - '''Reconstruct empty entity.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. 
- assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # No remote attributes should be set. - assert attribute.get_remote_value(user) is ftrack_api.symbol.NOT_SET - - -def test_delete_operation_ordering(session, unique_name): - '''Delete entities in valid order.''' - # Construct entities. - project_schema = session.query('ProjectSchema').first() - project = session.create('Project', { - 'name': unique_name, - 'full_name': unique_name, - 'project_schema': project_schema - }) - - sequence = session.create('Sequence', { - 'name': unique_name, - 'parent': project - }) - - session.commit() - - # Delete in order that should succeed. - session.delete(sequence) - session.delete(project) - - session.commit() - - -def test_create_then_delete_operation_ordering(session, unique_name): - '''Create and delete entity in one transaction.''' - entity = session.create('User', {'username': unique_name}) - session.delete(entity) - session.commit() - - -def test_create_and_modify_to_have_required_attribute(session, unique_name): - '''Create and modify entity to have required attribute in transaction.''' - entity = session.create('Scope', {}) - other = session.create('Scope', {'name': unique_name}) - entity['name'] = '{0}2'.format(unique_name) - session.commit() - - -def test_ignore_in_create_entity_payload_values_set_to_not_set( - mocker, unique_name, session -): - '''Ignore in commit, created entity data set to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should ignore 'email' attribute in payload. 
- new_user = session.create( - 'User', {'username': unique_name, 'email': 'test'} - ) - new_user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - payloads = mocked.call_args[0][0] - assert len(payloads) == 1 - - -def test_ignore_operation_that_modifies_attribute_to_not_set( - mocker, session, user -): - '''Ignore in commit, operation that sets attribute value to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should result in no call to server. - user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - - assert not mocked.called - - -def test_operation_optimisation_on_commit(session, mocker): - '''Optimise operations on commit.''' - mocked = mocker.patch.object(session, 'call') - - user_a = session.create('User', {'username': 'bob'}) - user_a['username'] = 'foo' - user_a['email'] = 'bob@example.com' - - user_b = session.create('User', {'username': 'martin'}) - user_b['email'] = 'martin@ftrack.com' - - user_a['email'] = 'bob@example.com' - user_a['first_name'] = 'Bob' - - user_c = session.create('User', {'username': 'neverexist'}) - user_c['email'] = 'ignore@example.com' - session.delete(user_c) - - user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() - user_b_entity_key = ftrack_api.inspection.primary_key(user_b).values() - - session.commit() - - # The above operations should have translated into three payloads to call - # (two creates and one update). 
- payloads = mocked.call_args[0][0] - assert len(payloads) == 3 - - assert payloads[0]['action'] == 'create' - assert payloads[0]['entity_key'] == user_a_entity_key - assert set(payloads[0]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username' - ]) - - assert payloads[1]['action'] == 'create' - assert payloads[1]['entity_key'] == user_b_entity_key - assert set(payloads[1]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username', 'email' - ]) - - assert payloads[2]['action'] == 'update' - assert payloads[2]['entity_key'] == user_a_entity_key - assert set(payloads[2]['entity_data'].keys()) == set([ - '__entity_type__', 'email', 'first_name' - ]) - - -def test_state_collection(session, unique_name, user): - '''Session state collection holds correct entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - assert session.created == [user_b] - assert session.modified == [user_c] - assert session.deleted == [user_d] - - -def test_get_entity_with_composite_primary_key(session, new_project): - '''Retrieve entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - assert retrieved_entity == entity - - -def test_get_entity_with_incomplete_composite_primary_key(session, new_project): - '''Fail to retrieve entity using incomplete composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. - new_session = ftrack_api.Session() - with pytest.raises(ValueError): - new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values()[0] - ) - - -def test_populate_entity(session, new_user): - '''Populate entity that uses single primary key.''' - with session.auto_populating(False): - assert new_user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(new_user, 'email') - assert new_user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entities(session, unique_name): - '''Populate multiple entities that use single primary key.''' - users = [] - for index in range(3): - users.append( - session.create( - 'User', {'username': '{0}-{1}'.format(unique_name, index)} - ) - ) - - session.commit() - - with session.auto_populating(False): - for user in users: - assert user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(users, 'email') - - for user in users: - assert user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entity_with_composite_primary_key(session, new_project): - '''Populate entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - # Manually change already populated remote value so can test it gets reset - # on populate call. - retrieved_entity.attributes.get('value').set_remote_value( - retrieved_entity, 'changed' - ) - - new_session.populate(retrieved_entity, 'value') - assert retrieved_entity['value'] == 'value' - - -@pytest.mark.parametrize('server_information, compatible', [ - ({}, False), - ({'version': '3.3.11'}, True), - ({'version': '3.3.12'}, True), - ({'version': '3.4'}, True), - ({'version': '3.4.1'}, True), - ({'version': '3.5.16'}, True), - ({'version': '3.3.10'}, False) -], ids=[ - 'No information', - 'Valid current version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Invalid lower version' -]) -def test_check_server_compatibility( - server_information, compatible, session -): - '''Check server compatibility.''' - with mock.patch.dict( - session._server_information, server_information, clear=True - ): - if compatible: - session.check_server_compatibility() - else: - with pytest.raises(ftrack_api.exception.ServerCompatibilityError): - session.check_server_compatibility() - - -def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): - '''Encode entity using "all" entity_attribute_strategy.''' - new_bar = mocked_schema_session.create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id' - } - ) - - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42, - 'number': 12345678.9, - 'boolean': False, - 'date': arrow.get('2015-11-18 15:24:09'), - 'bars': [new_bar] - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='all' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], - 
"boolean": false, - "date": {"__type__": "datetime", "value": "2015-11-18T15:24:09+00:00"}, - "id": "a_unique_id", - "integer": 42, - "number": 12345678.9, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_set_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "set_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='set_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "integer": 42, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_computed_attribute_using_persisted_only_attributes_strategy( - mocked_schema_session -): - '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' - new_bar = mocked_schema_session._create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id', - 'computed_value': 'FOO' - }, - reconstructing=True - ) - - encoded = mocked_schema_session.encode( - new_bar, entity_attribute_strategy='persisted_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Bar", - "id": "bar_unique_id", - "name": "myBar"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_modified_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "modified_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session._create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - }, - reconstructing=True - ) - - new_foo['string'] = 'Modified' - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='modified_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "string": "Modified"} - ''').replace('\n', '') - - -def test_encode_entity_using_invalid_strategy(session, new_task): - '''Fail to 
encode entity using invalid strategy.''' - with pytest.raises(ValueError): - session.encode(new_task, entity_attribute_strategy='invalid') - - -def test_encode_operation_payload(session): - '''Encode operation payload.''' - sequence_component = session.create_component( - "/path/to/sequence.%d.jpg [1]", location=None - ) - file_component = sequence_component["members"][0] - - encoded = session.encode([ - ftrack_api.session.OperationPayload({ - 'action': 'create', - 'entity_data': { - '__entity_type__': u'FileComponent', - u'container': sequence_component, - 'id': file_component['id'] - }, - 'entity_key': [file_component['id']], - 'entity_type': u'FileComponent' - }), - ftrack_api.session.OperationPayload({ - 'action': 'update', - 'entity_data': { - '__entity_type__': u'SequenceComponent', - u'members': ftrack_api.collection.Collection( - sequence_component, - sequence_component.attributes.get('members'), - data=[file_component] - ) - }, - 'entity_key': [sequence_component['id']], - 'entity_type': u'SequenceComponent' - }) - ]) - - expected = textwrap.dedent(''' - [{{"action": "create", - "entity_data": {{"__entity_type__": "FileComponent", - "container": {{"__entity_type__": "SequenceComponent", - "id": "{0[id]}"}}, - "id": "{1[id]}"}}, - "entity_key": ["{1[id]}"], - "entity_type": "FileComponent"}}, - {{"action": "update", - "entity_data": {{"__entity_type__": "SequenceComponent", - "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, - "entity_key": ["{0[id]}"], - "entity_type": "SequenceComponent"}}] - '''.format(sequence_component, file_component)).replace('\n', '') - - assert encoded == expected - - -def test_decode_partial_entity( - session, new_task -): - '''Decode partially encoded entity.''' - encoded = session.encode( - new_task, entity_attribute_strategy='set_only' - ) - - entity = session.decode(encoded) - - assert entity == new_task - assert entity is not new_task - - -def test_reset(mocker): - '''Reset session.''' - plugin_path = 
os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert hasattr(session.types.get('User'), 'stub') - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - mocked_close = mocker.patch.object(session._request, 'close') - mocked_fetch = mocker.patch.object(session, '_load_schemas') - - session.reset() - - # Assert custom entity type maintained. - assert hasattr(session.types.get('User'), 'stub') - - # Assert location plugin re-configured. - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - # Assert connection not closed and no schema fetch issued. - assert not mocked_close.called - assert not mocked_fetch.called - - -def test_rollback_scalar_attribute_change(session, new_user): - '''Rollback scalar attribute change via session.''' - assert not session.recorded_operations - current_first_name = new_user['first_name'] - - new_user['first_name'] = 'NewName' - assert new_user['first_name'] == 'NewName' - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert new_user['first_name'] == current_first_name - - -def test_rollback_collection_attribute_change(session, new_user): - '''Rollback collection attribute change via session.''' - assert not session.recorded_operations - current_timelogs = new_user['timelogs'] - assert list(current_timelogs) == [] - - timelog = session.create('Timelog', {}) - new_user['timelogs'].append(timelog) - assert list(new_user['timelogs']) == [timelog] - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert list(new_user['timelogs']) == [] - - -def test_rollback_entity_creation(session): - '''Rollback entity creation via session.''' - assert not 
session.recorded_operations - - new_user = session.create('User') - assert session.recorded_operations - assert new_user in session.created - - session.rollback() - - assert not session.recorded_operations - assert new_user not in session.created - assert new_user not in session._local_cache.values() - - -def test_rollback_entity_deletion(session, new_user): - '''Rollback entity deletion via session.''' - assert not session.recorded_operations - - session.delete(new_user) - assert session.recorded_operations - assert new_user in session.deleted - - session.rollback() - assert not session.recorded_operations - assert new_user not in session.deleted - assert new_user in session._local_cache.values() - - -# Caching -# ------------------------------------------------------------------------------ - - -def test_get_entity_bypassing_cache(session, user, mocker): - '''Retrieve an entity by type and id bypassing cache.''' - mocker.patch.object(session, 'call', wraps=session.call) - - session.cache.remove( - session.cache_key_maker.key(ftrack_api.inspection.identity(user)) - ) - - matching = session.get(*ftrack_api.inspection.identity(user)) - - # Check a different instance returned. - assert matching is not user - - # Check instances have the same identity. - assert matching == user - - # Check cache was bypassed and server was called. - assert session.call.called - - -def test_get_entity_from_cache(cache, task, mocker): - '''Retrieve an entity by type and id from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - session.merge(task) - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == task - assert entity is not task - - # Check that no call was made to server. 
- assert not session.call.called - - -def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): - '''Retrieve an entity tree from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - # TODO: Maybe cache should be prepopulated for a better check here. - session.query( - 'select children, children.children, children.children.children, ' - 'children.children.children.assignments, ' - 'children.children.children.assignments.resource ' - 'from Project where id is "{0}"' - .format(new_project_tree['id']) - ).one() - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == new_project_tree - assert entity is not new_project_tree - - # Check tree. - with session.auto_populating(False): - for sequence in entity['children']: - for shot in sequence['children']: - for task in shot['children']: - assignments = task['assignments'] - for assignment in assignments: - resource = assignment['resource'] - - assert resource is not ftrack_api.symbol.NOT_SET - - # Check that no call was made to server. - assert not session.call.called - - -def test_get_metadata_from_cache(session, mocker, cache, new_task): - '''Retrieve an entity along with its metadata from cache.''' - new_task['metadata']['key'] = 'value' - session.commit() - - fresh_session = ftrack_api.Session(cache=cache) - - # Prepare cache. - fresh_session.query( - 'select metadata.key, metadata.value from ' - 'Task where id is "{0}"' - .format(new_task['id']) - ).all() - - # Disable server calls. - mocker.patch.object(fresh_session, 'call') - - # Retrieve entity from cache. - entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' 
- assert entity == new_task - assert entity is not new_task - - # Check metadata cached correctly. - with fresh_session.auto_populating(False): - metadata = entity['metadata'] - assert metadata['key'] == 'value' - - assert not fresh_session.call.called - - -def test_merge_circular_reference(cache, temporary_file): - '''Merge circular reference into cache.''' - session = ftrack_api.Session(cache=cache) - # The following will test the condition as a FileComponent will be created - # with corresponding ComponentLocation. The server will return the file - # component data with the component location embedded. The component - # location will in turn have an embedded reference to the file component. - # If the merge does not prioritise the primary keys of the instance then - # any cache that relies on using the identity of the file component will - # fail. - component = session.create_component(path=temporary_file) - assert component - - -def test_create_with_selective_cache(session): - '''Create entity does not store entity in selective cache.''' - cache = ftrack_api.cache.MemoryCache() - session.cache.caches.append(SelectiveCache(cache)) - try: - user = session.create('User', {'username': 'martin'}) - cache_key = session.cache_key_maker.key( - ftrack_api.inspection.identity(user) - ) - - with pytest.raises(KeyError): - cache.get(cache_key) - - finally: - session.cache.caches.pop() - - -def test_correct_file_type_on_sequence_component(session): - '''Create sequence component with correct file type.''' - path = '/path/to/image/sequence.%04d.dpx [1-10]' - sequence_component = session.create_component(path) - - assert sequence_component['file_type'] == '.dpx' - - -def test_read_schemas_from_cache( - session, temporary_valid_schema_cache -): - '''Read valid content from schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def 
test_fail_to_read_schemas_from_invalid_cache( - session, temporary_invalid_schema_cache -): - '''Fail to read invalid content from schema cache.''' - with pytest.raises(ValueError): - session._read_schemas_from_cache( - temporary_invalid_schema_cache - ) - - -def test_write_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Write valid content to schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) - - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def test_fail_to_write_invalid_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Fail to write invalid content to schema cache.''' - # Datetime not serialisable by default. - invalid_content = datetime.datetime.now() - - with pytest.raises(TypeError): - session._write_schemas_to_cache( - invalid_content, temporary_valid_schema_cache - ) - - -def test_load_schemas_from_valid_cache( - mocker, session, temporary_valid_schema_cache, mocked_schemas -): - '''Load schemas from cache.''' - expected_schemas = session._load_schemas(temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call') - schemas = session._load_schemas(temporary_valid_schema_cache) - - assert schemas == expected_schemas - assert not mocked.called - - -def test_load_schemas_from_server_when_cache_invalid( - mocker, session, temporary_invalid_schema_cache -): - '''Load schemas from server when cache invalid.''' - mocked = mocker.patch.object(session, 'call', wraps=session.call) - - session._load_schemas(temporary_invalid_schema_cache) - assert mocked.called - - -def test_load_schemas_from_server_when_cache_outdated( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when cache outdated.''' - schemas, _ = 
session._read_schemas_from_cache(temporary_valid_schema_cache) - schemas.append({ - 'id': 'NewTest' - }) - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - assert mocked.called - - -def test_load_schemas_from_server_not_reporting_schema_hash( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when server does not report schema hash.''' - mocked_write = mocker.patch.object( - session, '_write_schemas_to_cache', - wraps=session._write_schemas_to_cache - ) - - server_information = session._server_information.copy() - server_information.pop('schema_hash') - mocker.patch.object( - session, '_server_information', new=server_information - ) - - session._load_schemas(temporary_valid_schema_cache) - - # Cache still written even if hash not reported. - assert mocked_write.called - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - # No hash reported by server so cache should have been bypassed. 
- assert mocked.called - - -def test_load_schemas_bypassing_cache( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas bypassing cache when set to False.''' - with mocker.patch.object(session, 'call', wraps=session.call): - - session._load_schemas(temporary_valid_schema_cache) - assert session.call.call_count == 1 - - session._load_schemas(False) - assert session.call.call_count == 2 - - -def test_get_tasks_widget_url(session): - '''Tasks widget URL returns valid HTTP status.''' - url = session.get_widget_url('tasks') - response = requests.get(url) - response.raise_for_status() - - -def test_get_info_widget_url(session, task): - '''Info widget URL for *task* returns valid HTTP status.''' - url = session.get_widget_url('info', entity=task, theme='light') - response = requests.get(url) - response.raise_for_status() - - -def test_encode_media_from_path(session, video_path): - '''Encode media based on a file path.''' - job = session.encode_media(video_path) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert 'source_component_id' in job_data - assert 'keep_original' in job_data and job_data['keep_original'] is False - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - assert 'format' in job_data['output'][0] - - -def test_encode_media_from_component(session, video_path): - '''Encode media based on a component.''' - location = session.query('Location where name is "ftrack.server"').one() - component = session.create_component( - video_path, - location=location - ) - session.commit() - - job = session.encode_media(component) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'keep_original' in job_data and job_data['keep_original'] is True - - -def test_create_sequence_component_with_size(session, temporary_sequence): - '''Create a sequence component and verify that is has a size.''' - location = session.query('Location where 
name is "ftrack.server"').one() - component = session.create_component( - temporary_sequence - ) - - assert component['size'] > 0 - - -def test_plugin_arguments(mocker): - '''Pass plugin arguments to plugin discovery mechanism.''' - mock = mocker.patch( - 'ftrack_api.plugin.discover' - ) - session = ftrack_api.Session( - plugin_paths=[], plugin_arguments={"test": "value"} - ) - assert mock.called - mock.assert_called_once_with([], [session], {"test": "value"}) - -def test_remote_reset(session, new_user): - '''Reset user api key.''' - key_1 = session.reset_remote( - 'api_key', entity=new_user - ) - - key_2 = session.reset_remote( - 'api_key', entity=new_user - ) - - - assert key_1 != key_2 - - -@pytest.mark.parametrize('attribute', [ - ('id',), - ('email',) - -], ids=[ - 'Fail resetting primary key', - 'Fail resetting attribute without default value', -]) -def test_fail_remote_reset(session, user, attribute): - '''Fail trying to rest invalid attributes.''' - - with pytest.raises(ftrack_api.exception.ServerError): - session.reset_remote( - attribute, user - ) - - -def test_close(session): - '''Close session.''' - assert session.closed is False - session.close() - assert session.closed is True - - -def test_close_already_closed_session(session): - '''Close session that is already closed.''' - session.close() - assert session.closed is True - session.close() - assert session.closed is True - - -def test_server_call_after_close(session): - '''Fail to issue calls to server after session closed.''' - session.close() - assert session.closed is True - - with pytest.raises(ftrack_api.exception.ConnectionClosedError): - session.query('User').first() - - -def test_context_manager(session): - '''Use session as context manager.''' - with session: - assert session.closed is False - - assert session.closed is True - - -def test_delayed_job(session): - '''Test the delayed_job action''' - - with pytest.raises(ValueError): - session.delayed_job( - 'DUMMY_JOB' - ) - - 
-@pytest.mark.skip(reason='No configured ldap server.') -def test_delayed_job_ldap_sync(session): - '''Test the a delayed_job ldap sync action''' - result = session.delayed_job( - ftrack_api.symbol.JOB_SYNC_USERS_LDAP - ) - - assert isinstance( - result, ftrack_api.entity.job.Job - ) - - -def test_query_nested_custom_attributes(session, new_asset_version): - '''Query custom attributes nested and update a value and query again. - - This test will query custom attributes via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Read the version via a relation in both sessions. - def get_versions(sessions): - versions = [] - for _session in sessions: - asset = _session.query( - 'select versions.custom_attributes from Asset where id is "{0}"'.format( - new_asset_version.get('asset_id') - ) - ).first() - - for version in asset['versions']: - if version.get('id') == new_asset_version.get('id'): - versions.append(version) - - return versions - - # Get version from both sessions. - versions = get_versions((session_one, session_two)) - - # Read attribute for both sessions. - for version in versions: - version['custom_attributes']['versiontest'] - - # Set attribute on session_one. - versions[0]['custom_attributes']['versiontest'] = random.randint( - 0, 99999 - ) - - session.commit() - - # Read version from server for session_two. - session_two_version = get_versions((session_two, ))[0] - - # Verify that value in session 2 is the same as set and committed in - # session 1. - assert ( - session_two_version['custom_attributes']['versiontest'] == - versions[0]['custom_attributes']['versiontest'] - ) - - -def test_query_nested(session): - '''Query components nested and update a value and query again. 
- - This test will query components via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - query = ( - 'select versions.components.name from Asset where id is ' - '"12939d0c-6766-11e1-8104-f23c91df25eb"' - ) - - def get_version(session): - '''Return the test version from *session*.''' - asset = session.query(query).first() - asset_version = None - for version in asset['versions']: - if version['version'] == 8: - asset_version = version - break - - return asset_version - - asset_version = get_version(session_one) - asset_version2 = get_version(session_two) - - # This assert is not needed, but reading the collections are to ensure they - # are inflated. - assert ( - asset_version2['components'][0]['name'] == - asset_version['components'][0]['name'] - ) - - asset_version['components'][0]['name'] = str(uuid.uuid4()) - - session.commit() - - asset_version2 = get_version(session_two) - - assert ( - asset_version['components'][0]['name'] == - asset_version2['components'][0]['name'] - ) - - -def test_merge_iterations(session, mocker, project): - '''Ensure merge does not happen to many times when querying.''' - mocker.spy(session, '_merge') - - session.query( - 'select status from Task where project_id is {} limit 10'.format( - project['id'] - ) - ).all() - - assert session._merge.call_count < 75 - - -@pytest.mark.parametrize( - 'get_versions', - [ - lambda component, asset_version, asset: component['version']['asset']['versions'], - lambda component, asset_version, asset: asset_version['asset']['versions'], - lambda component, asset_version, asset: asset['versions'], - ], - ids=[ - 'from_component', - 'from_asset_version', - 'from_asset', - ] -) -def test_query_nested2(session, get_versions): - '''Query version.asset.versions from component and then add new version. 
- - This test will query versions via multiple relations and ensure a new - version appears when added to a different session and then is queried - again. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Get a random component that is linked to a version and asset. - component_id = session_two.query( - 'FileComponent where version.asset_id != None' - ).first()['id'] - - query = ( - 'select version.asset.versions from Component where id is "{}"'.format( - component_id - ) - ) - - component = session_one.query(query).one() - asset_version = component['version'] - asset = component['version']['asset'] - versions = component['version']['asset']['versions'] - length = len(versions) - - session_two.create('AssetVersion', { - 'asset_id': asset['id'] - }) - - session_two.commit() - - component = session_one.query(query).one() - versions = get_versions(component, asset_version, asset) - new_length = len(versions) - - assert length + 1 == new_length - - -def test_session_ready_reset_events(mocker): - '''Session ready and reset events.''' - plugin_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 0 - - session.reset() - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 1 - - -def test_entity_reference(mocker, session): - '''Return entity reference that uniquely identifies entity.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_auto_populating = mocker.patch.object(session, "auto_populating") - mock_primary_key = mocker.patch( - "ftrack_api.inspection.primary_key", return_value={"id": "mock-id"} - ) - - reference = session.entity_reference(mock_entity) - - assert reference 
== { - "__entity_type__": "MockEntityType", - "id": "mock-id" - } - - mock_auto_populating.assert_called_once_with(False) - mock_primary_key.assert_called_once_with(mock_entity) - - -def test__entity_reference(mocker, session): - '''Act as alias to entity_reference.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_entity_reference = mocker.patch.object(session, "entity_reference") - mocker.patch("warnings.warn") - - session._entity_reference(mock_entity) - - mock_entity_reference.assert_called_once_with(mock_entity) - - -def test__entity_reference_issues_deprecation_warning(mocker, session): - '''Issue deprecation warning for usage of _entity_reference.''' - mocker.patch.object(session, "entity_reference") - mock_warn = mocker.patch("warnings.warn") - - session._entity_reference({}) - - mock_warn.assert_called_once_with( - ( - "Session._entity_reference is now available as public method " - "Session.entity_reference. The private method will be removed " - "in version 2.0." 
- ), - PendingDeprecationWarning - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py deleted file mode 100644 index cf8b014ee59..00000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest -import ftrack_api.exception - - -def test_manually_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - with pytest.raises(ftrack_api.exception.ServerError): - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - session.reset() - - -def test_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - new_user.start_timer() - - with pytest.raises(ftrack_api.exception.NotUniqueError): - new_user.start_timer() - - session.reset() - - -def test_start_and_stop_a_timer(session, new_user, new_task): - '''Start a new timer and stop it to create a timelog.''' - new_user.start_timer(new_task) - - new_user.stop_timer() - - timelog = session.query( - 'Timelog where context_id = "{0}"'.format(new_task['id']) - ).one() - - assert timelog['user_id'] == new_user['id'], 'User id is correct.' - assert timelog['context_id'] == new_task['id'], 'Task id is correct.' - - -def test_start_a_timer_when_timer_is_running(session, new_user, new_task): - '''Start a timer when an existing timer is already running.''' - new_user.start_timer(new_task) - - # Create the second timer without context. - new_user.start_timer(force=True) - - # There should be only one existing timelog for this user. 
- timelogs = session.query( - 'Timelog where user_id = "{0}"'.format(new_user['id']) - ).all() - assert len(timelogs) == 1, 'One timelog exists.' - - timelog = session.query( - 'Timer where user_id = "{0}"'.format(new_user['id']) - ).one() - - # Make sure running timer has no context. - assert timelog['context_id'] is None, 'Timer does not have a context.' - - -def test_stop_timer_without_timer_running(session, new_user): - '''Stop a timer when no timer is running.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - new_user.stop_timer() From e9cdcc5fafe513c7ad8c1565a23b3c41988dbffb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 11:16:18 +0200 Subject: [PATCH 55/77] added ftrack submodules to right folder --- .gitmodules | 10 +++++----- .../default_modules/ftrack/python2_vendor/arrow | 1 + .../ftrack/python2_vendor/ftrack-python-api | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) create mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/arrow create mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api diff --git a/.gitmodules b/.gitmodules index 52f2fc07508..82fd194d26f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,9 +4,9 @@ [submodule "repos/avalon-unreal-integration"] path = repos/avalon-unreal-integration url = https://github.com/pypeclub/avalon-unreal-integration.git -[submodule "openpype/modules/ftrack/python2_vendor/ftrack-python-api"] - path = openpype/modules/ftrack/python2_vendor/ftrack-python-api +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"] + path = openpype/modules/default_modules/ftrack/python2_vendor/arrow + url = git@github.com:arrow-py/arrow.git +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"] + path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api url = https://bitbucket.org/ftrack/ftrack-python-api.git -[submodule "openpype/modules/ftrack/python2_vendor/arrow"] - 
path = openpype/modules/ftrack/python2_vendor/arrow - url = https://github.com/arrow-py/arrow.git \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow b/openpype/modules/default_modules/ftrack/python2_vendor/arrow new file mode 160000 index 00000000000..b746fedf728 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow @@ -0,0 +1 @@ +Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api new file mode 160000 index 00000000000..d277f474ab0 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api @@ -0,0 +1 @@ +Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 9d456283bf68ff91ac5b6b7d3d999e96a0114998 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 11:47:09 +0200 Subject: [PATCH 56/77] hound fixes --- openpype/modules/base.py | 9 ++++++--- .../ftrack/ftrack_server/event_server_cli.py | 11 ++--------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index fc53d3b27a0..77d9ddbcecd 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -2,7 +2,6 @@ """Base class for Pype Modules.""" import os import sys -import types import time import inspect import logging @@ -366,12 +365,16 @@ def initialize_modules(self): not_implemented = [] for attr_name in dir(modules_item): attr = getattr(modules_item, attr_name, None) - if attr and getattr(attr, "__isabstractmethod__", None): + abs_method = getattr( + attr, "__isabstractmethod__", None + ) + if attr and abs_method: not_implemented.append(attr_name) # Log missing implementations self.log.warning(( - "Skipping abstract Class: {}. Missing implementations: {}" + "Skipping abstract Class: {}." 
+ " Missing implementations: {}" ).format(name, ", ".join(not_implemented))) continue module_classes.append(modules_item) diff --git a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py index 1e14929d96e..d8e4d05580d 100644 --- a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py @@ -19,15 +19,8 @@ OpenPypeMongoConnection ) from openpype_modules.ftrack import FTRACK_MODULE_DIR -from openpype_modules.ftrack.lib import ( - credentials, - get_ftrack_url_from_settings -) -from openpype_modules.ftrack.ftrack_server.lib import ( - check_ftrack_url, - get_ftrack_event_mongo_info -) - +from openpype_modules.ftrack.lib import credentials +from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread From c2f48efe10203ce5fcf9015c30e616a5abb10388 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:31:50 +0200 Subject: [PATCH 57/77] renamed PypeModule to OpenPypeModule --- openpype/modules/__init__.py | 4 ++-- openpype/modules/base.py | 10 +++++----- .../modules/default_modules/avalon_apps/avalon_app.py | 4 ++-- .../default_modules/clockify/clockify_module.py | 4 ++-- .../default_modules/deadline/deadline_module.py | 4 ++-- .../modules/default_modules/ftrack/ftrack_module.py | 4 ++-- .../default_modules/idle_manager/idle_module.py | 4 ++-- openpype/modules/default_modules/launcher_action.py | 4 ++-- .../default_modules/log_viewer/log_view_module.py | 4 ++-- openpype/modules/default_modules/muster/muster.py | 4 ++-- .../modules/default_modules/project_manager_action.py | 4 ++-- .../default_modules/settings_module/settings_action.py | 6 +++--- openpype/modules/default_modules/slack/slack_module.py | 4 ++-- .../default_modules/standalonepublish_action.py | 4 ++-- 
.../default_modules/sync_server/sync_server_module.py | 4 ++-- .../default_modules/timers_manager/timers_manager.py | 4 ++-- .../default_modules/webserver/webserver_module.py | 4 ++-- 17 files changed, 38 insertions(+), 38 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 3ad9a751611..81853faa385 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from .base import ( - PypeModule, + OpenPypeModule, OpenPypeInterface, ModulesManager, TrayModulesManager @@ -8,7 +8,7 @@ __all__ = ( - "PypeModule", + "OpenPypeModule", "OpenPypeInterface", "ModulesManager", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 77d9ddbcecd..1f8fa6ae258 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -244,7 +244,7 @@ class MissingInteface(OpenPypeInterface): @six.add_metaclass(ABCMeta) -class PypeModule: +class OpenPypeModule: """Base class of pype module. Attributes: @@ -299,7 +299,7 @@ def get_global_environments(self): return {} -class OpenPypeAddOn(PypeModule): +class OpenPypeAddOn(OpenPypeModule): pass @@ -351,11 +351,11 @@ def initialize_modules(self): for name in dir(module): modules_item = getattr(module, name, None) # Filter globals that are not classes which inherit from - # PypeModule + # OpenPypeModule if ( not inspect.isclass(modules_item) - or modules_item is PypeModule - or not issubclass(modules_item, PypeModule) + or modules_item is OpenPypeModule + or not issubclass(modules_item, OpenPypeModule) ): continue diff --git a/openpype/modules/default_modules/avalon_apps/avalon_app.py b/openpype/modules/default_modules/avalon_apps/avalon_app.py index 7f130bfab1d..53e06ec90ac 100644 --- a/openpype/modules/default_modules/avalon_apps/avalon_app.py +++ b/openpype/modules/default_modules/avalon_apps/avalon_app.py @@ -1,14 +1,14 @@ import os import openpype from openpype import resources -from openpype.modules import PypeModule +from 
openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class AvalonModule(PypeModule, ITrayModule, IWebServerRoutes): +class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes): name = "avalon" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/clockify/clockify_module.py b/openpype/modules/default_modules/clockify/clockify_module.py index 83f8d07c3aa..a9e989f4ec3 100644 --- a/openpype/modules/default_modules/clockify/clockify_module.py +++ b/openpype/modules/default_modules/clockify/clockify_module.py @@ -7,7 +7,7 @@ CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IPluginPaths, @@ -17,7 +17,7 @@ class ClockifyModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, diff --git a/openpype/modules/default_modules/deadline/deadline_module.py b/openpype/modules/default_modules/deadline/deadline_module.py index 47fd4e9656e..a5e189ee526 100644 --- a/openpype/modules/default_modules/deadline/deadline_module.py +++ b/openpype/modules/default_modules/deadline/deadline_module.py @@ -1,9 +1,9 @@ import os -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths -class DeadlineModule(PypeModule, IPluginPaths): +class DeadlineModule(OpenPypeModule, IPluginPaths): name = "deadline" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py index 6fd27372617..1de152535cd 100644 --- a/openpype/modules/default_modules/ftrack/ftrack_module.py +++ b/openpype/modules/default_modules/ftrack/ftrack_module.py @@ -2,7 +2,7 @@ import json import collections import openpype -from openpype.modules import PypeModule +from openpype.modules import 
OpenPypeModule from openpype_interfaces import ( ITrayModule, @@ -18,7 +18,7 @@ class FtrackModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, ITimersManager, diff --git a/openpype/modules/default_modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py index d669fcb90e1..1a6d71a961b 100644 --- a/openpype/modules/default_modules/idle_manager/idle_module.py +++ b/openpype/modules/default_modules/idle_manager/idle_module.py @@ -1,14 +1,14 @@ import platform import collections -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, IIdleManager ) -class IdleManager(PypeModule, ITrayService): +class IdleManager(OpenPypeModule, ITrayService): """ Measure user's idle time in seconds. Idle time resets on keyboard/mouse input. Is able to emit signals at specific time idle. diff --git a/openpype/modules/default_modules/launcher_action.py b/openpype/modules/default_modules/launcher_action.py index 728143ffacd..e3252e38428 100644 --- a/openpype/modules/default_modules/launcher_action.py +++ b/openpype/modules/default_modules/launcher_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class LauncherAction(PypeModule, ITrayAction): +class LauncherAction(OpenPypeModule, ITrayAction): label = "Launcher" name = "launcher_tool" diff --git a/openpype/modules/default_modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py index 22826d8a540..bc1a98f4adf 100644 --- a/openpype/modules/default_modules/log_viewer/log_view_module.py +++ b/openpype/modules/default_modules/log_viewer/log_view_module.py @@ -1,9 +1,9 @@ from openpype.api import Logger -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule -class 
LogViewModule(PypeModule, ITrayModule): +class LogViewModule(OpenPypeModule, ITrayModule): name = "log_viewer" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py index 164f20054a5..a0e72006af8 100644 --- a/openpype/modules/default_modules/muster/muster.py +++ b/openpype/modules/default_modules/muster/muster.py @@ -2,14 +2,14 @@ import json import appdirs import requests -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class MusterModule(PypeModule, ITrayModule, IWebServerRoutes): +class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes): """ Module handling Muster Render credentials. This will display dialog asking for user credentials for Muster if not already specified. diff --git a/openpype/modules/default_modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py index 9a36d973b37..c1f984a8cbf 100644 --- a/openpype/modules/default_modules/project_manager_action.py +++ b/openpype/modules/default_modules/project_manager_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class ProjectManagerAction(PypeModule, ITrayAction): +class ProjectManagerAction(OpenPypeModule, ITrayAction): label = "Project Manager (beta)" name = "project_manager" admin_action = True diff --git a/openpype/modules/default_modules/settings_module/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py index a6909e1fdf2..7140c57bab7 100644 --- a/openpype/modules/default_modules/settings_module/settings_action.py +++ b/openpype/modules/default_modules/settings_module/settings_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces 
import ITrayAction -class SettingsAction(PypeModule, ITrayAction): +class SettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "settings" label = "Studio Settings" @@ -71,7 +71,7 @@ def show_settings_window(self): self.settings_window.reset() -class LocalSettingsAction(PypeModule, ITrayAction): +class LocalSettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "local_settings" label = "Settings" diff --git a/openpype/modules/default_modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py index 8e6ac100370..e3f7b4ad19c 100644 --- a/openpype/modules/default_modules/slack/slack_module.py +++ b/openpype/modules/default_modules/slack/slack_module.py @@ -1,5 +1,5 @@ import os -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( IPluginPaths, ILaunchHookPaths @@ -8,7 +8,7 @@ SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(PypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): """Allows sending notification to Slack channels during publishing.""" name = "slack" diff --git a/openpype/modules/default_modules/standalonepublish_action.py b/openpype/modules/default_modules/standalonepublish_action.py index 53319f9e113..9321a415a9a 100644 --- a/openpype/modules/default_modules/standalonepublish_action.py +++ b/openpype/modules/default_modules/standalonepublish_action.py @@ -2,11 +2,11 @@ import platform import subprocess from openpype.lib import get_pype_execute_args -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class StandAlonePublishAction(PypeModule, ITrayAction): +class StandAlonePublishAction(OpenPypeModule, ITrayAction): label = "Publish" name = "standalonepublish_tool" diff --git 
a/openpype/modules/default_modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py index 63f39474b1e..e65a410551c 100644 --- a/openpype/modules/default_modules/sync_server/sync_server_module.py +++ b/openpype/modules/default_modules/sync_server/sync_server_module.py @@ -7,7 +7,7 @@ from avalon.api import AvalonMongoDB -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule from openpype.api import ( Anatomy, @@ -29,7 +29,7 @@ log = PypeLogger().get_logger("SyncServer") -class SyncServerModule(PypeModule, ITrayModule): +class SyncServerModule(OpenPypeModule, ITrayModule): """ Synchronization server that is syncing published files from local to any of implemented providers (like GDrive, S3 etc.) diff --git a/openpype/modules/default_modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py index b31e14209a0..d7dfe390a05 100644 --- a/openpype/modules/default_modules/timers_manager/timers_manager.py +++ b/openpype/modules/default_modules/timers_manager/timers_manager.py @@ -1,6 +1,6 @@ import os import collections -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITimersManager, ITrayService, @@ -10,7 +10,7 @@ from avalon.api import AvalonMongoDB -class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): +class TimersManager(OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes): """ Handles about Timers. Should be able to start/stop all timers at once. 
diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py index f81bf52410e..ff3456f903b 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -2,14 +2,14 @@ import socket from openpype import resources -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, IWebServerRoutes ) -class WebServerModule(PypeModule, ITrayService): +class WebServerModule(OpenPypeModule, ITrayService): name = "webserver" label = "WebServer" From c4869abd568886b3241d0ba5eb57a6f514dbe4e3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:31:57 +0200 Subject: [PATCH 58/77] update readme a littlebit --- openpype/modules/README.md | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/modules/README.md b/openpype/modules/README.md index 818375461fa..d54ba7c835a 100644 --- a/openpype/modules/README.md +++ b/openpype/modules/README.md @@ -1,7 +1,7 @@ -# Pype modules -Pype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering. +# OpenPype modules +OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or special plugins. 
-## Base class `PypeModule` +## Base class `OpenPypeModule` - abstract class as base for each module - implementation should be module's api withou GUI parts - may implement `get_global_environments` method which should return dictionary of environments that are globally appliable and value is the same for whole studio if launched at any workstation (except os specific paths) @@ -17,6 +17,15 @@ Pype modules should contain separated logic of specific kind of implementation, - interface is class that has defined abstract methods to implement and may contain preimplemented helper methods - module that inherit from an interface must implement those abstract methods otherwise won't be initialized - it is easy to find which module object inherited from which interfaces withh 100% chance they have implemented required methods +- interfaces can be defined in `interfaces.py` inside module directory + - the file can't use relative imports or import anything from other parts + of module itself at the header of file + +## Base class `OpenPypeInterface` +- has nothing implemented +- has ABCMeta as metaclass +- is defined to be able find out classes which inherit from this base to be + able tell this is an Interface ## Global interfaces - few interfaces are implemented for global usage @@ -70,7 +79,7 @@ Pype modules should contain separated logic of specific kind of implementation, - Clockify has more inharitance it's class definition looks like ``` class ClockifyModule( - PypeModule, # Says it's Pype module so ModulesManager will try to initialize. + OpenPypeModule, # Says it's Pype module so ModulesManager will try to initialize. ITrayModule, # Says has special implementation when used in tray. IPluginPaths, # Says has plugin paths that want to register (paths to clockify actions for launcher). IFtrackEventHandlerPaths, # Says has Ftrack actions/events for user/server. 
From b8d25956ac8e483f4f26e4643fa1984e7d7358af Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:42:05 +0200 Subject: [PATCH 59/77] fix formatting --- .../modules/default_modules/timers_manager/timers_manager.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py index d7dfe390a05..80f448095f6 100644 --- a/openpype/modules/default_modules/timers_manager/timers_manager.py +++ b/openpype/modules/default_modules/timers_manager/timers_manager.py @@ -10,7 +10,9 @@ from avalon.api import AvalonMongoDB -class TimersManager(OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes): +class TimersManager( + OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes +): """ Handles about Timers. Should be able to start/stop all timers at once. From 70393b6772f8ccc1a490ec47a3cc247b38efb50b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:00:52 +0200 Subject: [PATCH 60/77] added thread locks on loading functions --- openpype/modules/base.py | 47 ++++++++++++++++++++++++++++++++-------- 1 file changed, 38 insertions(+), 9 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1f8fa6ae258..c9771b60e4f 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -5,6 +5,7 @@ import time import inspect import logging +import threading import collections from uuid import uuid4 from abc import ABCMeta, abstractmethod @@ -84,6 +85,13 @@ def __getattr__(self, attr_name): return self.__attributes__[attr_name] +class _LoadCache: + interfaces_lock = threading.Lock() + modules_lock = threading.Lock() + interfaces_loaded = False + modules_loaded = False + + def get_default_modules_dir(): current_dir = os.path.abspath(os.path.dirname(__file__)) @@ -98,13 +106,26 @@ def get_module_dirs(): def load_interfaces(force=False): - if not force and "openpype_interfaces" in 
sys.modules: + if _LoadCache.interfaces_loaded and not force: return + if not _LoadCache.interfaces_lock.locked(): + with _LoadCache.interfaces_lock: + _load_interfaces() + _LoadCache.interfaces_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.interfaces_lock.locked(): + time.sleep(0.1) + + +def _load_interfaces(): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = ( - _InterfacesClass("openpype_interfaces") + modules_key = "openpype_interfaces" + + sys.modules[modules_key] = openpype_interfaces = ( + _InterfacesClass(modules_key) ) log = PypeLogger.get_logger("InterfacesLoader") @@ -156,25 +177,33 @@ def load_interfaces(force=False): def load_modules(force=False): - # TODO add thread lock + if _LoadCache.modules_loaded and not force: + return # First load interfaces # - modules must not be imported before interfaces load_interfaces(force) - # Key under which will be modules imported in `sys.modules` - modules_key = "openpype_modules" + if not _LoadCache.modules_lock.locked(): + with _LoadCache.modules_lock: + _load_modules() + _LoadCache.modules_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.modules_lock.locked(): + time.sleep(0.1) - # Check if are modules already loaded or no - if not force and modules_key in sys.modules: - return +def _load_modules(): # Import helper functions from lib from openpype.lib import ( import_filepath, import_module_from_dirpath ) + # Key under which will be modules imported in `sys.modules` + modules_key = "openpype_modules" + # Change `sys.modules` sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) From 611346bf839840d1823a7d28c1a73558dd2550b7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:09 +0200 Subject: [PATCH 61/77] added logger to module class --- openpype/modules/base.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git 
a/openpype/modules/base.py b/openpype/modules/base.py index c9771b60e4f..29fdd9c8dfd 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -31,6 +31,8 @@ def __init__(self, name): super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) + super(_ModuleClass, self).__setattr__("_log", None) + def __getattr__(self, attr_name): if attr_name not in self.__attributes__: if attr_name in ("__path__"): @@ -45,6 +47,12 @@ def __iter__(self): yield module def __setattr__(self, attr_name, value): + if attr_name in self.__attributes__: + self.log.warning( + "Duplicated name \"{}\" in {}. Overriding.".format( + self.name, attr_name + ) + ) self.__attributes__[attr_name] = value def __setitem__(self, key, value): @@ -53,6 +61,14 @@ def __setitem__(self, key, value): def __getitem__(self, key): return getattr(self, key) + @property + def log(self): + if self._log is None: + super(_ModuleClass, self).__setattr__( + "_log", PypeLogger.get_logger(self.name) + ) + return self._log + def get(self, key, default=None): return self.__attributes__.get(key, default) From aedbded534da2c2d1d9b9c5fe57e2ab2ae2e22b9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:19 +0200 Subject: [PATCH 62/77] added few docstrings --- openpype/modules/base.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 29fdd9c8dfd..6c2eae332fc 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -109,12 +109,14 @@ class _LoadCache: def get_default_modules_dir(): + """Path to default OpenPype modules.""" current_dir = os.path.abspath(os.path.dirname(__file__)) return os.path.join(current_dir, "default_modules") def get_module_dirs(): + """List of paths where OpenPype modules can be found.""" dirpaths = [ get_default_modules_dir() ] @@ -122,6 +124,15 @@ def get_module_dirs(): def load_interfaces(force=False): + 
"""Load interfaces from modules into `openpype_interfaces`. + + Only classes which inherit from `OpenPypeInterface` are loaded and stored. + + Args: + force(bool): Force to load interfaces even if are already loaded. + This won't update already loaded and used (cached) interfaces. + """ + if _LoadCache.interfaces_loaded and not force: return @@ -136,6 +147,7 @@ def load_interfaces(force=False): def _load_interfaces(): + # Key under which will be modules imported in `sys.modules` from openpype.lib import import_filepath modules_key = "openpype_interfaces" @@ -193,6 +205,22 @@ def _load_interfaces(): def load_modules(force=False): + """Load OpenPype modules as python modules. + + Modules does not load only classes (like in Interfaces) because there must + be ability to use inner code of module and be able to import it from one + defined place. + + With this it is possible to import module's content from predefined module. + + Function makes sure that `load_interfaces` was triggered. Modules import + has specific order which can't be changed. + + Args: + force(bool): Force to load modules even if are already loaded. + This won't update already loaded and used (cached) modules. 
+ """ + if _LoadCache.modules_loaded and not force: return From c0f669a4b10eda2f5c1f4a5334a17565d38135ea Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:29 +0200 Subject: [PATCH 63/77] intrefaces has repr --- openpype/modules/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 6c2eae332fc..4ffc8cc1de1 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -293,6 +293,9 @@ class _OpenPypeInterfaceMeta(ABCMeta): def __str__(self): return "<'OpenPypeInterface.{}'>".format(self.__name__) + def __repr__(self): + return str(self) + @six.add_metaclass(_OpenPypeInterfaceMeta) class OpenPypeInterface: From cfabde66fb40f9de1c72207806f56e0f019ef3e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:12:23 +0200 Subject: [PATCH 64/77] fixed double import of modules --- openpype/modules/base.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 4ffc8cc1de1..d43d5635d11 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -177,7 +177,6 @@ def _load_interfaces(): if os.path.exists(interfaces_path): interface_paths.append(interfaces_path) - # print(interface_paths) for full_path in interface_paths: if not os.path.exists(full_path): continue @@ -271,21 +270,14 @@ def _load_modules(): fullpath = os.path.join(dirpath, filename) basename, ext = os.path.splitext(filename) - module = None # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest if os.path.isdir(fullpath): - module = import_module_from_dirpath( - dirpath, filename, modules_key - ) - module_name = filename + import_module_from_dirpath(dirpath, filename, modules_key) elif ext in (".py", ): module = import_filepath(fullpath) - module_name = basename - - if module is not None: - setattr(openpype_modules, module_name, module) + setattr(openpype_modules, 
basename, module) class _OpenPypeInterfaceMeta(ABCMeta): From 5b71c522a50336d7777c7785e09d9e337f5a9503 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:48:06 +0200 Subject: [PATCH 65/77] added missing function to init file --- openpype/modules/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 81853faa385..583480b0499 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,6 +2,9 @@ from .base import ( OpenPypeModule, OpenPypeInterface, + + load_modules, + ModulesManager, TrayModulesManager ) @@ -11,6 +14,8 @@ "OpenPypeModule", "OpenPypeInterface", + "load_modules", + "ModulesManager", "TrayModulesManager" ) From b6383ccb9afec966edd533e293f050160e23a9db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:50:16 +0200 Subject: [PATCH 66/77] fixed conflict changes --- .../ftrack/event_handlers_user/action_where_run_ask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index 2c427cfff75..b4133fbe786 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -2,7 +2,7 @@ import socket import getpass -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ActionWhereIRun(BaseAction): From a44805ae36a378c11e7861f90b891914d91149b6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:52:15 +0200 Subject: [PATCH 67/77] removed unused import --- .../ftrack/event_handlers_user/action_where_run_ask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index b4133fbe786..0d69913996d 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -2,7 +2,7 @@ import socket import getpass -from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction class ActionWhereIRun(BaseAction): From 1e50751d9def8c3db79bf3d900dfec0f5da21755 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Aug 2021 14:51:22 +0200 Subject: [PATCH 68/77] Changed missed import --- openpype/tools/tray_app/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/tray_app/app.py b/openpype/tools/tray_app/app.py index 339e6343f8d..03f83214643 100644 --- a/openpype/tools/tray_app/app.py +++ b/openpype/tools/tray_app/app.py @@ -9,7 +9,7 @@ from datetime import datetime from avalon import style -from openpype.modules.webserver import host_console_listener +from openpype_modules.webserver import host_console_listener from Qt import QtWidgets, QtCore From 976bc45a1e595273024a3711a6c3269f162247e8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Aug 2021 15:18:53 +0200 Subject: [PATCH 69/77] Changed missed imports --- .../clockify/ftrack/server/action_clockify_sync_server.py | 2 +- .../clockify/ftrack/user/action_clockify_sync_local.py | 2 +- .../default_modules/clockify/launcher_actions/ClockifyStart.py | 2 +- .../default_modules/clockify/launcher_actions/ClockifySync.py | 2 +- .../default_modules/slack/launch_hooks/pre_python2_vendor.py | 2 +- .../event_handlers_server/action_private_project_detection.py | 2 +- openpype/pype_commands.py | 2 +- openpype/settings/entities/enum_entity.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git 
a/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py index 8379414c0cf..c6b55947da6 100644 --- a/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -1,7 +1,7 @@ import os import json from openpype_modules.ftrack.lib import ServerAction -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyServer(ServerAction): diff --git a/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py index 3d55ee92b6f..a430791906c 100644 --- a/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -1,6 +1,6 @@ import json from openpype_modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyLocal(BaseAction): diff --git a/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py index c431ea240d7..db51964eb73 100644 --- a/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py @@ -1,6 +1,6 @@ from avalon import api, io from openpype.api import Logger -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI log = Logger().get_logger(__name__) diff --git 
a/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py index 1bb168a80be..02982d373a2 100644 --- a/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py @@ -1,5 +1,5 @@ from avalon import api, io -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI from openpype.api import Logger log = Logger().get_logger(__name__) diff --git a/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py index a2c1f8a9e06..0f4bc22a345 100644 --- a/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.slack import SLACK_MODULE_DIR +from openpype_modules.slack import SLACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py b/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py index 5213e10ba3d..62772740cd5 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py +++ b/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PrivateProjectDetectionAction(ServerAction): diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 7c47d8c6139..978dcbc0d73 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -35,7 +35,7 @@ def launch_settings_gui(dev): @staticmethod def launch_eventservercli(*args): - 
from openpype.modules.ftrack.ftrack_server.event_server_cli import ( + from openpype_modules.ftrack.ftrack_server.event_server_cli import ( run_event_server ) return run_event_server(*args) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 4f6a2886bcd..31ce96a0596 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -399,7 +399,7 @@ def _item_initalization(self): self.placeholder = None def _get_enum_values(self): - from openpype.modules.sync_server.providers import lib as lib_providers + from openpype_modules.sync_server.providers import lib as lib_providers providers = lib_providers.factory.providers From 3d4c18941a77efa6c78f1f69aa8b594f3c047e09 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:25:45 +0200 Subject: [PATCH 70/77] modified imports in comments --- .../ftrack/event_handlers_user/action_create_cust_attrs.py | 2 +- .../ftrack/plugins/publish/integrate_hierarchy_ftrack.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 599d2eb2572..3869d8ad08e 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -43,7 +43,7 @@ group (string) - name of group - - based on attribute `openpype.modules.ftrack.lib.CUST_ATTR_GROUP` + - based on attribute `openpype_modules.ftrack.lib.CUST_ATTR_GROUP` - "pype" by default *** Required *************************************************************** diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 2fd5296d24a..fbd64d9f703 
100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -4,7 +4,7 @@ import pyblish.api from avalon import io -# Copy of constant `openpype.modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` +# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" From 2bbb5e0fc10e67b0b3325bcd915a717249237f41 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:40:11 +0200 Subject: [PATCH 71/77] added a little bit readme info --- openpype/modules/README.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/openpype/modules/README.md b/openpype/modules/README.md index d54ba7c835a..a3733518acc 100644 --- a/openpype/modules/README.md +++ b/openpype/modules/README.md @@ -1,5 +1,17 @@ -# OpenPype modules -OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or special plugins. +# OpenPype modules/addons +OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or may contain only special plugins. Addons work the same way currently there is no difference in module and addon. + +## Modules concept +- modules and addons are dynamically imported to virtual python module `openpype_modules` from which it is possible to import them no matter where is the modulo located +- modules or addons should never be imported directly even if you know possible full import path + - it is because all of their content must be imported in specific order and should not be imported without defined functions as it may also break few implementation parts + +### TODOs +- add module/addon manifest + - definition of module (not 100% defined content e.g. 
minimum require OpenPype version etc.) + - defying that folder is content of a module or an addon +- module/addon have it's settings schemas and default values outside OpenPype +- add general setting of paths to modules ## Base class `OpenPypeModule` - abstract class as base for each module @@ -20,6 +32,7 @@ OpenPype modules should contain separated logic of specific kind of implementati - interfaces can be defined in `interfaces.py` inside module directory - the file can't use relative imports or import anything from other parts of module itself at the header of file + - this is one of reasons why modules/addons can't be imported directly without using defined functions in OpenPype modules implementation ## Base class `OpenPypeInterface` - has nothing implemented From abd7bfa375df0fdea3241b0019c871627a72c803 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:41:03 +0200 Subject: [PATCH 72/77] moved new file to right folder --- .../event_handlers_server/action_private_project_detection.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_private_project_detection.py (100%) diff --git a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py similarity index 100% rename from openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py From 25a742ecda82c1ac2e906dc0c87075af45bb32b6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:15:21 +0200 Subject: [PATCH 73/77] moved python console interpreter to default submodules --- .../python_console_interpreter/__init__.py | 0 .../python_console_interpreter/module.py | 5 +++-- .../python_console_interpreter/window/__init__.py | 0 
.../python_console_interpreter/window/widgets.py | 0 4 files changed, 3 insertions(+), 2 deletions(-) rename openpype/modules/{ => default_modules}/python_console_interpreter/__init__.py (100%) rename openpype/modules/{ => default_modules}/python_console_interpreter/module.py (88%) rename openpype/modules/{ => default_modules}/python_console_interpreter/window/__init__.py (100%) rename openpype/modules/{ => default_modules}/python_console_interpreter/window/widgets.py (100%) diff --git a/openpype/modules/python_console_interpreter/__init__.py b/openpype/modules/default_modules/python_console_interpreter/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/__init__.py diff --git a/openpype/modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py similarity index 88% rename from openpype/modules/python_console_interpreter/module.py rename to openpype/modules/default_modules/python_console_interpreter/module.py index b37f35dfe00..7fd8d80f289 100644 --- a/openpype/modules/python_console_interpreter/module.py +++ b/openpype/modules/default_modules/python_console_interpreter/module.py @@ -1,7 +1,8 @@ -from .. 
import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayModule -class PythonInterpreterAction(PypeModule, ITrayAction): +class PythonInterpreterAction(OpenPypeModule, ITrayAction): label = "Console" name = "python_interpreter" admin_action = True diff --git a/openpype/modules/python_console_interpreter/window/__init__.py b/openpype/modules/default_modules/python_console_interpreter/window/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/window/__init__.py diff --git a/openpype/modules/python_console_interpreter/window/widgets.py b/openpype/modules/default_modules/python_console_interpreter/window/widgets.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/widgets.py rename to openpype/modules/default_modules/python_console_interpreter/window/widgets.py From fc2e54ef368cd3b365aadabba9d4637d01319e5e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:15:52 +0200 Subject: [PATCH 74/77] moved new deadline plugins --- .../plugins/publish/collect_deadline_server_from_instance.py | 0 .../deadline/plugins/publish/collect_default_deadline_server.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/collect_deadline_server_from_instance.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/collect_default_deadline_server.py (100%) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py rename to 
openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py rename to openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py From ab4310cf87eefec7d64a52a97b50f5553b5bdcd8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:19:16 +0200 Subject: [PATCH 75/77] fixes in console to match new structure --- .../default_modules/python_console_interpreter/module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py index 7fd8d80f289..f4df3fb6d8e 100644 --- a/openpype/modules/default_modules/python_console_interpreter/module.py +++ b/openpype/modules/default_modules/python_console_interpreter/module.py @@ -1,5 +1,5 @@ from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype_interfaces import ITrayAction class PythonInterpreterAction(OpenPypeModule, ITrayAction): @@ -26,7 +26,7 @@ def create_interpreter_window(self): if self._interpreter_window: return - from openpype.modules.python_console_interpreter.window import ( + from openpype_modules.python_console_interpreter.window import ( PythonInterpreterWidget ) From c52f0535f6688885715a43208ccb8403e2521183 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Aug 2021 19:04:01 +0200 Subject: [PATCH 76/77] fix tools --- openpype/lib/applications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 1644b9c9776..d7baf1d27b0 100644 --- a/openpype/lib/applications.py 
+++ b/openpype/lib/applications.py @@ -1105,7 +1105,7 @@ def prepare_host_environments(data, implementation_envs=True): asset_doc = data.get("asset_doc") # Add tools environments groups_by_name = {} - tool_by_group_name = collections.defaultdict(list) + tool_by_group_name = collections.defaultdict(dict) if asset_doc: # Make sure each tool group can be added only once for key in asset_doc["data"].get("tools_env") or []: From 14d8789bff9acfc7d91abaa7eef322be01c55f0a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Aug 2021 19:06:24 +0200 Subject: [PATCH 77/77] second fix of tools --- openpype/lib/applications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index d7baf1d27b0..71ab2eac61b 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1120,7 +1120,7 @@ def prepare_host_environments(data, implementation_envs=True): environments.append(group.environment) added_env_keys.add(group_name) for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[tool_name] + tool = tool_by_group_name[group_name][tool_name] environments.append(tool.environment) added_env_keys.add(tool.name)