diff --git a/doc/conf.py b/doc/conf.py index 1cea37bc..89c2667c 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,7 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2014 ftrack -'''ftrack Python API documentation build configuration file.''' +"""ftrack Python API documentation build configuration file.""" import os import re @@ -11,57 +11,56 @@ # Extensions. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.extlinks', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'lowdown' + "sphinx.ext.autodoc", + "sphinx.ext.extlinks", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "lowdown", ] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'ftrack Python API' -copyright = u'2014, ftrack' +project = "ftrack Python API" +copyright = "2014, ftrack" # contents of docs/conf.py try: - release = get_distribution('ftrack-python-api').version + release = get_distribution("ftrack-python-api").version # take major/minor/patch - VERSION = '.'.join(release.split('.')[:3]) + VERSION = ".".join(release.split(".")[:3]) except DistributionNotFound: - # package is not installed - VERSION = 'Unknown version' + # package is not installed + VERSION = "Unknown version" version = VERSION release = VERSION # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_template'] +exclude_patterns = ["_template"] # A list of prefixes to ignore for module listings. -modindex_common_prefix = [ - 'ftrack_api.' -] +modindex_common_prefix = ["ftrack_api."] # -- HTML output -------------------------------------------------------------- -if not os.environ.get('READTHEDOCS', None) == 'True': +if not os.environ.get("READTHEDOCS", None) == "True": # Only import and set the theme if building locally. import sphinx_rtd_theme + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" -html_static_path = ['_static'] -html_style = 'ftrack.css' +html_static_path = ["_static"] +html_style = "ftrack.css" # If True, copy source rst files to output for reference. 
html_copy_source = True @@ -69,13 +68,17 @@ # -- Autodoc ------------------------------------------------------------------ -autodoc_default_options = {"members": None, 'undoc-members': None, 'inherited-members': None} -autodoc_member_order = 'bysource' +autodoc_default_options = { + "members": None, + "undoc-members": None, + "inherited-members": None, +} +autodoc_member_order = "bysource" def autodoc_skip(app, what, name, obj, skip, options): - '''Don't skip __init__ method for autodoc.''' - if name == '__init__': + """Don't skip __init__ method for autodoc.""" + if name == "__init__": return False return skip @@ -83,17 +86,16 @@ def autodoc_skip(app, what, name, obj, skip, options): # -- Intersphinx -------------------------------------------------------------- -intersphinx_mapping = { - 'python': ('http://docs.python.org/', None) -} +intersphinx_mapping = {"python": ("http://docs.python.org/", None)} # -- Todos --------------------------------------------------------------------- -todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' +todo_include_todos = os.environ.get("FTRACK_DOC_INCLUDE_TODOS", False) == "True" # -- Setup -------------------------------------------------------------------- + def setup(app): - app.connect('autodoc-skip-member', autodoc_skip) + app.connect("autodoc-skip-member", autodoc_skip) diff --git a/doc/resource/example_plugin.py b/doc/resource/example_plugin.py index 5fda0195..4f3b76d1 100644 --- a/doc/resource/example_plugin.py +++ b/doc/resource/example_plugin.py @@ -5,20 +5,20 @@ def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') + """Register plugin. Called when used as a plugin.""" + logger = logging.getLogger("com.example.example-plugin") # Validate that session is an instance of ftrack_api.Session. If not, # assume that register is being called from an old or incompatible API and # return without doing anything. if not isinstance(session, ftrack_api.session.Session): logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0!r} is not an " + "ftrack_api.Session instance.".format(session) ) return # Perform your logic here, such as subscribe to an event. pass - logger.debug('Plugin registered') + logger.debug("Plugin registered") diff --git a/doc/resource/example_plugin_using_session.py b/doc/resource/example_plugin_using_session.py index 2fd99636..a17ebc12 100644 --- a/doc/resource/example_plugin_using_session.py +++ b/doc/resource/example_plugin_using_session.py @@ -5,33 +5,32 @@ def register_with_session_ready(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.example.example-plugin') - logger.debug('Session ready.') - session = event['data']['session'] + """Called when session is ready to be used.""" + logger = logging.getLogger("com.example.example-plugin") + logger.debug("Session ready.") + session = event["data"]["session"] # Session is now ready and can be used to e.g. query objects. - task = session.query('Task').first() - print(task['name']) + task = session.query("Task").first() + print(task["name"]) def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') + """Register plugin. 
Called when used as a plugin.""" + logger = logging.getLogger("com.example.example-plugin") # Validate that session is an instance of ftrack_api.Session. If not, # assume that register is being called from an old or incompatible API and # return without doing anything. if not isinstance(session, ftrack_api.session.Session): logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0!r} is not an " + "ftrack_api.Session instance.".format(session) ) return session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - register_with_session_ready + "topic=ftrack.api.session.ready", register_with_session_ready ) - logger.debug('Plugin registered') + logger.debug("Plugin registered") diff --git a/resource/plugin/configure_locations.py b/resource/plugin/configure_locations.py index 0682a5ee..8a91ae95 100644 --- a/resource/plugin/configure_locations.py +++ b/resource/plugin/configure_locations.py @@ -9,8 +9,8 @@ def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] + """Configure locations for session.""" + session = event["data"]["session"] # Find location(s) and customise instances. # @@ -20,20 +20,19 @@ def configure_locations(event): def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') + """Register plugin with *session*.""" + logger = logging.getLogger("ftrack_plugin:configure_locations.register") # Validate that session is an instance of ftrack_api.Session. If not, assume # that register is being called from an old or incompatible API and return # without doing anything. if not isinstance(session, ftrack_api.Session): logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0} is not an " + "ftrack_api.Session instance.".format(session) ) return session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations + "topic=ftrack.api.session.configure-location", configure_locations ) diff --git a/resource/plugin/construct_entity_type.py b/resource/plugin/construct_entity_type.py index 45f78416..477f62e1 100644 --- a/resource/plugin/construct_entity_type.py +++ b/resource/plugin/construct_entity_type.py @@ -7,10 +7,10 @@ class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' + """Entity class factory.""" def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' + """Create and return entity class from *schema*.""" # Optionally change bases for class to be generated. cls = super(Factory, self).create(schema, bases=bases) @@ -20,27 +20,26 @@ def create(self, schema, bases=None): def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') + """Register plugin with *session*.""" + logger = logging.getLogger("ftrack_plugin:construct_entity_type.register") # Validate that session is an instance of ftrack_api.Session. If not, assume # that register is being called from an old or incompatible API and return # without doing anything. 
if not isinstance(session, ftrack_api.Session): logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0!r} is not an " + "ftrack_api.Session instance.".format(session) ) return factory = Factory() def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] + """Return class to represent entity type specified by *event*.""" + schema = event["data"]["schema"] return factory.create(schema) session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type + "topic=ftrack.api.session.construct-entity-type", construct_entity_type ) diff --git a/source/ftrack_api/__init__.py b/source/ftrack_api/__init__.py index d8ee30bd..8fbdad87 100644 --- a/source/ftrack_api/__init__.py +++ b/source/ftrack_api/__init__.py @@ -6,27 +6,18 @@ def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. + """Mixin *mixin_class* to *instance*. *name* can be used to specify new class name. If not specified then one will be generated. - ''' + """ if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) + name = "{0}{1}".format(instance.__class__.__name__, mixin_class.__name__) # Check mixin class not already present in mro in order to avoid consistent # method resolution failure. if mixin_class in instance.__class__.mro(): return - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) + instance.__class__ = type(name, (mixin_class, instance.__class__), {}) diff --git a/source/ftrack_api/_centralized_storage_scenario.py b/source/ftrack_api/_centralized_storage_scenario.py index 1770b0c8..5dbd345c 100644 --- a/source/ftrack_api/_centralized_storage_scenario.py +++ b/source/ftrack_api/_centralized_storage_scenario.py @@ -16,140 +16,131 @@ from ftrack_api.logging import LazyLogMessage as L -scenario_name = 'ftrack.centralized-storage' +scenario_name = "ftrack.centralized-storage" class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' + """Configure a centralized storage scenario.""" def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + """Instantiate centralized storage scenario.""" + self.logger = logging.getLogger(__name__ + "." 
+ self.__class__.__name__) @property def storage_scenario(self): - '''Return storage scenario setting.''' + """Return storage scenario setting.""" return self.session.query( - 'select value from Setting ' + "select value from Setting " 'where name is "storage_scenario" and group is "STORAGE"' ).one() @property def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' + """Return existing centralized storage configuration.""" storage_scenario = self.storage_scenario try: - configuration = json.loads(storage_scenario['value']) + configuration = json.loads(storage_scenario["value"]) except (ValueError, TypeError): return None if not isinstance(configuration, dict): return None - if configuration.get('scenario') != scenario_name: + if configuration.get("scenario") != scenario_name: return None - return configuration.get('data', {}) + return configuration.get("data", {}) def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') + """Return confirmation text from *configuration*.""" + configure_location = configuration.get("configure_location") + select_location = configuration.get("select_location") + select_mount_point = configuration.get("select_mount_point") if configure_location: location_text = str( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' + "A new location will be created:\n\n" + "* Label: {location_label}\n" + "* Name: {location_name}\n" + "* Description: {location_description}\n" ).format(**configure_location) else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) + location = self.session.get("Location", select_location["location_id"]) + location_text = "You have chosen to use an existing location: {0}".format( location["label"] ) mount_points_text = str( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' + "* Linux: {linux}\n* OS X: {osx}\n* Windows: {windows}\n\n" ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' + linux=select_mount_point.get("linux_mount_point") or "*Not set*", + osx=select_mount_point.get("osx_mount_point") or "*Not set*", + windows=select_mount_point.get("windows_mount_point") or "*Not set*", ) mount_points_not_set = [] - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') + if not select_mount_point.get("linux_mount_point"): + mount_points_not_set.append("Linux") - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') + if not select_mount_point.get("osx_mount_point"): + mount_points_not_set.append("OS X") - if not select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') + if not select_mount_point.get("windows_mount_point"): + mount_points_not_set.append("Windows") if mount_points_not_set: mount_points_text += str( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' 
- ).format( - missing=' and '.join(mount_points_not_set) - ) + "Please be aware that this location will not be working on " + "{missing} because the mount points are not set up." + ).format(missing=" and ".join(mount_points_not_set)) text = str( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) + "#Confirm storage setup#\n\n" + "Almost there! Please take a moment to verify the settings you " + "are about to save. You can always come back later and update the " + "configuration.\n" + "##Location##\n\n" + "{location}\n" + "##Mount points##\n\n" + "{mount_points}" + ).format(location=location_text, mount_points=mount_points_text) return text def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' + """Configure scenario based on *event* and return form items.""" steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' + "select_scenario", + "select_location", + "configure_location", + "select_structure", + "select_mount_point", + "confirm_summary", + "save_configuration", ) - warning_message = '' - values = event['data'].get('values', {}) + warning_message = "" + values = event["data"].get("values", {}) # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') + previous_step = values.get("step", "select_scenario") next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) + state = "configuring" + + self.logger.info( + L( + "Configuring scenario, previous step: {0}, next step: {1}. " + "Values {2!r}.", + previous_step, + next_step, + values, + ) + ) - if 'configuration' in values: - configuration = values.pop('configuration') + if "configuration" in values: + configuration = values.pop("configuration") else: configuration = {} @@ -157,181 +148,182 @@ def configure_scenario(self, event): # Update configuration with values from the previous step. configuration[previous_step] = values - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': + if previous_step == "select_location": + values = configuration["select_location"] + if values.get("location_id") != "create_new_location": location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) + 'Location where id is "{0}"'.format(values.get("location_id")) ).first() if not location_exists: - next_step = 'select_location' + next_step = "select_location" warning_message = ( - '**The selected location does not exist. Please choose ' - 'one from the dropdown or create a new one.**' + "**The selected location does not exist. 
Please choose " + "one from the dropdown or create a new one.**" ) - if next_step == 'select_location': + if next_step == "select_location": try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) + location_id = self.existing_centralized_storage_configuration[ + "location_id" + ] except (KeyError, TypeError): location_id = None - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] + options = [{"label": "Create new location", "value": "create_new_location"}] for location in self.session.query( - 'select name, label, description from Location' + "select name, label, description from Location" ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' + if location["name"] not in ( + "ftrack.origin", + "ftrack.unmanaged", + "ftrack.connect", + "ftrack.server", + "ftrack.review", ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) + options.append( + { + "label": "{label} ({name})".format( + label=location["label"], name=location["name"] + ), + "description": location["description"], + "value": location["id"], + } + ) - warning = '' + warning = "" if location_id is not None: # If there is already a location configured we must make the # user aware that changing the location may be problematic. warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' + "\n\n**Be careful if you switch to another location " + "for an existing storage scenario. Components that have " + "already been published to the previous location will be " + "made unavailable for common use.**" ) default_value = location_id elif location_id is None and len(options) == 1: # No location configured and no existing locations to use. - default_value = 'create_new_location' + default_value = "create_new_location" else: # There are existing locations to choose from but non of them # are currently active in the centralized storage scenario. default_value = None - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. {0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' + items = [ + { + "type": "label", + "value": ( + "#Select location#\n" + "Choose an already existing location or create a new one " + "to represent your centralized storage. {0}".format(warning) + ), + }, + { + "type": "enumerator", + "label": "Location", + "name": "location_id", + "value": default_value, + "data": options, + }, + ] + + default_location_name = "studio.central-storage-location" + default_location_label = "Studio location" default_location_description = ( - 'The studio central location where all components are ' - 'stored.' + "The studio central location where all components are stored." 
) - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) + if previous_step == "configure_location": + configure_location = configuration.get("configure_location") if configure_location: try: existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') + 'Location where name is "{0}"'.format( + configure_location.get("location_name") ) ).first() - except UnicodeEncodeError: - next_step = 'configure_location' + except UnicodeEncodeError: + next_step = "configure_location" warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' + "**The location name contains non-ASCII characters. " + "Please change the name and try again.**" ) - values = configuration['select_location'] + values = configuration["select_location"] else: if existing_location: - next_step = 'configure_location' + next_step = "configure_location" warning_message += ( - u'**There is already a location named {0}. ' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') + "**There is already a location named {0}. " + "Please change the name and try again.**".format( + configure_location.get("location_name") ) ) - values = configuration['select_location'] + values = configuration["select_location"] if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') + not configure_location.get("location_name") + or not configure_location.get("location_label") + or not configure_location.get("location_description") ): - next_step = 'configure_location' + next_step = "configure_location" warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' + "**Location name, label and description cannot be empty.**" ) - values = configuration['select_location'] + values = configuration["select_location"] - if next_step == 'configure_location': + if next_step == "configure_location": # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': + default_location_label = configure_location["location_label"] + default_location_name = configure_location["location_name"] + default_location_description = configure_location[ + "location_description" + ] + + if next_step == "configure_location": + if values.get("location_id") == "create_new_location": # Add options to create a new location. - items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' 
- ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] + items = [ + { + "type": "label", + "value": ( + "#Create location#\n" + "Here you will create a new location to be used " + "with your new Storage scenario. For your " + "convenience we have already filled in some default " + "values. If this is the first time you are configuring " + "a storage scenario in ftrack we recommend that you " + "stick with these settings." + ), + }, + { + "label": "Label", + "name": "location_label", + "value": default_location_label, + "type": "text", + }, + { + "label": "Name", + "name": "location_name", + "value": default_location_name, + "type": "text", + }, + { + "label": "Description", + "name": "location_description", + "value": default_location_description, + "type": "text", + }, + ] else: # The user selected an existing location. Move on to next # step. - next_step = 'select_mount_point' + next_step = "select_mount_point" - if next_step == 'select_structure': + if next_step == "select_structure": # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' + next_step = "select_mount_point" # items = [ # { # 'type': 'label', @@ -358,280 +350,254 @@ def configure_scenario(self, event): # } # ] - if next_step == 'select_mount_point': + if next_step == "select_mount_point": try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) + mount_points = self.existing_centralized_storage_configuration[ + "accessor" + ]["mount_points"] except (KeyError, TypeError): mount_points = dict() items = [ { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' + "value": ( + "#Mount points#\n" + "Set mount points for your centralized storage " + "location. For the location to work as expected each " + "platform that you intend to use must have the " + "corresponding mount point set and the storage must " + "be accessible. If not set correctly files will not be " + "saved or read." ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. \\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } + "type": "label", + }, + { + "type": "text", + "label": "Linux", + "name": "linux_mount_point", + "empty_text": "E.g. /usr/mnt/MyStorage ...", + "value": mount_points.get("linux", ""), + }, + { + "type": "text", + "label": "OS X", + "name": "osx_mount_point", + "empty_text": "E.g. /Volumes/MyStorage ...", + "value": mount_points.get("osx", ""), + }, + { + "type": "text", + "label": "Windows", + "name": "windows_mount_point", + "empty_text": "E.g. 
\\\\MyStorage ...", + "value": mount_points.get("windows", ""), + }, ] - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' + if next_step == "confirm_summary": + items = [ + {"type": "label", "value": self._get_confirmation_text(configuration)} + ] + state = "confirm" - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] + if next_step == "save_configuration": + mount_points = configuration["select_mount_point"] + select_location = configuration["select_location"] - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] + if select_location["location_id"] == "create_new_location": + configure_location = configuration["configure_location"] location = self.session.create( - 'Location', + "Location", { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } + "name": configure_location["location_name"], + "label": configure_location["location_label"], + "description": (configure_location["location_description"]), + }, ) else: location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) + 'Location where id is "{0}"'.format(select_location["location_id"]) ).one() - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } + setting_value = json.dumps( + { + "scenario": scenario_name, + "data": { + "location_id": location["id"], + "location_name": location["name"], + "accessor": { + "mount_points": { + "linux": mount_points["linux_mount_point"], + "osx": mount_points["osx_mount_point"], + "windows": mount_points["windows_mount_point"], + } + }, + }, } - }) + ) - self.storage_scenario['value'] = setting_value + self.storage_scenario["value"] = setting_value self.session.commit() # Broadcast an event that storage scenario has been configured. event = ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.configure-done' + topic="ftrack.storage-scenario.configure-done" ) self.session.event_hub.publish(event) - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' + items = [ + { + "type": "label", + "value": ( + "#Done!#\n" + "Your storage scenario is now configured and ready " + "to use. 
**Note that you may have to restart Connect and " + "other applications to start using it.**" ), + } + ] + state = "done" if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) + items.insert(0, {"type": "label", "value": warning_message}) - return { - 'items': items, - 'state': state - } + items.append( + {"type": "hidden", "value": configuration, "name": "configuration"} + ) + items.append({"type": "hidden", "value": next_step, "name": "step"}) + + return {"items": items, "state": state} def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' + """Return action discover dictionary for *event*.""" return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) + "id": scenario_name, + "name": "Centralized storage scenario", + "description": ( + "(Recommended) centralized storage scenario where all files " + "are kept on a storage that is mounted and available to " + "everyone in the studio." + ), } def register(self, session): - '''Subscribe to events on *session*.''' + """Subscribe to events on *session*.""" self.session = session #: TODO: Move these to a separate function. session.event_hub.subscribe( str( - 'topic=ftrack.storage-scenario.discover ' + "topic=ftrack.storage-scenario.discover " 'and source.user.username="{0}"' ).format( - session.api_user - ), - self.discover_centralized_scenario + ).format(session.api_user), + self.discover_centralized_scenario, ) session.event_hub.subscribe( str( - 'topic=ftrack.storage-scenario.configure ' + "topic=ftrack.storage-scenario.configure " 'and data.scenario_id="{0}" ' 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario + ).format(scenario_name, session.api_user), + self.configure_scenario, ) class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' + """Activate a centralized storage scenario.""" def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + """Instantiate centralized storage scenario.""" + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] + """Activate scenario in *event*.""" + storage_scenario = event["data"]["storage_scenario"] try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] + location_data = storage_scenario["data"] + location_name = location_data["location_name"] + location_id = location_data["location_id"] + mount_points = location_data["accessor"]["mount_points"] except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) + error_message = "Unable to read storage scenario data." self.logger.error(L(error_message)) raise ftrack_api.exception.LocationError( - 'Unable to configure location based on scenario.' + "Unable to configure location based on scenario." 
) else: location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True + "Location", + data=dict(name=location_name, id=location_id), + reconstructing=True, ) - if 'darwin' in sys.platform: - prefix = mount_points['osx'] - elif 'linux' in sys.platform: - prefix = mount_points['linux'] - elif 'win' in sys.platform: - prefix = mount_points['windows'] + if "darwin" in sys.platform: + prefix = mount_points["osx"] + elif "linux" in sys.platform: + prefix = mount_points["linux"] + elif "win" in sys.platform: + prefix = mount_points["windows"] else: raise ftrack_api.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) + ("Unable to find accessor prefix for platform {0}.").format( + sys.platform + ) ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=prefix - ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix=prefix) location.structure = _standard.StandardStructure() location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) + self.logger.info( + L( + "Storage scenario activated. Configured {0!r} from {1!r}", + location, + storage_scenario, + ) + ) def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] + """Verify the storage scenario configuration.""" + storage_scenario = event["data"]["storage_scenario"] + location_data = storage_scenario["data"] + mount_points = location_data["accessor"]["mount_points"] prefix = None - if 'darwin' in sys.platform: - prefix = mount_points['osx'] - elif 'linux' in sys.platform: - prefix = mount_points['linux'] - elif 'win' in sys.platform: - prefix = mount_points['windows'] + if "darwin" in sys.platform: + prefix = mount_points["osx"] + elif "linux" in sys.platform: + prefix = mount_points["linux"] + elif "win" in sys.platform: + prefix = mount_points["windows"] if not prefix: return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' + "The storage scenario has not been configured for your " + "operating system. ftrack may not be able to " + "store and track files correctly." ) if not os.path.isdir(prefix): - return ( - str( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) + return str( + "The path {0} does not exist. ftrack may not be able to " + "store and track files correctly. \n\nIf the storage is " + "newly set up you may want to create necessary folder " + "structures. If the storage is a network drive you should " + "make sure that it is mounted correctly." 
+ ).format(prefix) def register(self, session): - '''Subscribe to events on *session*.''' + """Subscribe to events on *session*.""" self.session = session session.event_hub.subscribe( ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) + "topic=ftrack.storage-scenario.activate " + 'and data.storage_scenario.scenario="{0}"'.format(scenario_name) ), - self.activate + self.activate, ) # Listen to verify startup event from ftrack connect to allow responding @@ -639,21 +605,20 @@ def register(self, session): # scenario that the user should be notified about. self.session.event_hub.subscribe( ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) + "topic=ftrack.connect.verify-startup " + 'and data.storage_scenario.scenario="{0}"'.format(scenario_name) ), - self._verify_startup + self._verify_startup, ) + def register(session): - '''Register storage scenario.''' + """Register storage scenario.""" scenario = ActivateCentralizedStorageScenario() scenario.register(session) def register_configuration(session): - '''Register storage scenario.''' + """Register storage scenario configuration.""" scenario = ConfigureCentralizedStorageScenario() scenario.register(session) diff --git a/source/ftrack_api/_python_ntpath.py b/source/ftrack_api/_python_ntpath.py index c5a8fe93..43d88992 100644 --- a/source/ftrack_api/_python_ntpath.py +++ b/source/ftrack_api/_python_ntpath.py @@ -17,32 +17,65 @@ from genericpath import * -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "walk", + "expanduser", + "expandvars", + "normpath", + "abspath", + "splitunc", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", +] # strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: +curdir = "." +pardir = ".." +extsep = "." +sep = "\\" +pathsep = ";" +altsep = "/" +defpath = ".;C:\\bin" +if "ce" in sys.builtin_module_names: + defpath = "\\Windows" +elif "os2" in sys.builtin_module_names: # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' + altsep = "/" +devnull = "nul" # Normalize the case of a pathname and map slashes to backslashes. # Other normalizations (such as optimizing '../' away) are not done # (this is done by normpath). + def normcase(s): """Normalize case of pathname. @@ -56,14 +89,16 @@ def normcase(s): # volume), or if a pathname after the volume letter and colon / UNC resource # starts with a slash or backslash. + def isabs(s): """Test whether a path is absolute""" s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' + return s != "" and s[:1] in "/\\" # Join two (or more) paths. 
+ def join(a, *p): """Join two or more pathname components, inserting "\\" as needed. If any component is an absolute path, all previous path components @@ -88,8 +123,7 @@ def join(a, *p): b_wins = 1 # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): + elif len(path) > 3 or (len(path) == 3 and path[-1] not in "/\\"): # case 3 b_wins = 1 @@ -115,7 +149,7 @@ def join(a, *p): # but b is empty; since, e.g., split('a/') produces # ('a', ''), it's best if join() adds a backslash in # this case. - path += '\\' + path += "\\" return path @@ -125,10 +159,10 @@ def join(a, *p): # It is always true that drivespec + pathspec == p def splitdrive(p): """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': + "(drive,path)"; either part may be empty""" + if p[1:2] == ":": return p[0:2], p[2:] - return '', p + return "", p # Parse UNC paths @@ -140,24 +174,24 @@ def splitunc(p): using backslashes). unc+rest is always the input path. Paths containing drive letters never have an UNC part. """ - if p[1:2] == ':': - return '', p # Drive letter present + if p[1:2] == ":": + return "", p # Drive letter present firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': + if firstTwo == "//" or firstTwo == "\\\\": # is a UNC path: # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter # \\machine\mountpoint\directories... # directory ^^^^^^^^^^^^^^^ normp = normcase(p) - index = normp.find('\\', 2) + index = normp.find("\\", 2) if index == -1: ##raise RuntimeError, 'illegal UNC path: "' + p + '"' return ("", p) - index = normp.find('\\', index + 1) + index = normp.find("\\", index + 1) if index == -1: index = len(p) return p[:index], p[index:] - return '', p + return "", p # Split a path in head (everything up to the last '/') and tail (the @@ -165,6 +199,7 @@ def splitunc(p): # join(head, tail) == p holds. # The resulting head won't end in '/' unless it is the root. + def split(p): """Split a pathname. @@ -174,12 +209,12 @@ def split(p): d, p = splitdrive(p) # set i to index beyond p's last slash i = len(p) - while i and p[i-1] not in '/\\': + while i and p[i - 1] not in "/\\": i = i - 1 head, tail = p[:i], p[i:] # now tail has no slashes # remove trailing slashes from head, unless it's all slashes head2 = head - while head2 and head2[-1] in '/\\': + while head2 and head2[-1] in "/\\": head2 = head2[:-1] head = head2 or head return d + head, tail @@ -190,13 +225,17 @@ def split(p): # pathname component; the root is everything before that. # It is always true that root + ext == p. + def splitext(p): return genericpath._splitext(p, sep, altsep, extsep) + + splitext.__doc__ = genericpath._splitext.__doc__ # Return the tail (basename) part of a path. + def basename(p): """Returns the final component of a pathname""" return split(p)[1] @@ -204,32 +243,37 @@ def basename(p): # Return the head (dirname) part of a path. + def dirname(p): """Returns the directory component of a pathname""" return split(p)[0] + # Is a path a symbolic link? # This will always return false on systems where posix.lstat doesn't exist. + def islink(path): """Test for symbolic link. On WindowsNT/95 and OS/2 always returns false """ return False + # alias exists to lexists lexists = exists # Is a path a mount point? Either a root (with or without drive letter) # or an UNC path with at most a / or \ after the mount point. 
+ def ismount(path): """Test whether a path is a mount point (defined as root of drive)""" unc, rest = splitunc(path) if unc: return rest in ("", "/", "\\") p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' + return len(p) == 1 and p[0] in "/\\" # Directory tree walk. @@ -240,6 +284,7 @@ def ismount(path): # The func may modify the filenames list, to implement a filter, # or to impose a different order of visiting. + def walk(top, func, arg): """Directory tree walk with callback function. @@ -254,8 +299,9 @@ def walk(top, func, arg): beyond that arg is always passed to func. It can be used, e.g., to pass a filename pattern, or a mutable object designed to accumulate statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) + warnings.warnpy3k( + "In 3.x, os.path.walk is removed in favor of os.walk.", stacklevel=2 + ) try: names = os.listdir(top) except os.error: @@ -276,30 +322,31 @@ def walk(top, func, arg): # (A function should also be defined to do full *sh-style environment # variable expansion.) + def expanduser(path): """Expand ~ and ~user constructs. If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': + if path[:1] != "~": return path i, n = 1, len(path) - while i < n and path[i] not in '/\\': + while i < n and path[i] not in "/\\": i = i + 1 - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: + if "HOME" in os.environ: + userhome = os.environ["HOME"] + elif "USERPROFILE" in os.environ: + userhome = os.environ["USERPROFILE"] + elif not "HOMEPATH" in os.environ: return path else: try: - drive = os.environ['HOMEDRIVE'] + drive = os.environ["HOMEDRIVE"] except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) + drive = "" + userhome = join(drive, os.environ["HOMEPATH"]) - if i != 1: #~user + if i != 1: # ~user userhome = join(dirname(userhome), path[1:i]) return userhome + path[i:] @@ -318,76 +365,78 @@ def expanduser(path): # XXX With COMMAND.COM you can use any characters in a variable name, # XXX except '^|<>='. + def expandvars(path): """Expand shell variables of the forms $var, ${var} and %var%. 
Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: + if "$" not in path and "%" not in path: return path import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' + + varchars = string.ascii_letters + string.digits + "_-" + res = "" index = 0 pathlen = len(path) while index < pathlen: c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] + if c == "'": # no expansion within single quotes + path = path[index + 1 :] pathlen = len(path) try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] + index = path.index("'") + res = res + "'" + path[: index + 1] except ValueError: res = res + path index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': + elif c == "%": # variable or '%' + if path[index + 1 : index + 2] == "%": res = res + c index = index + 1 else: - path = path[index+1:] + path = path[index + 1 :] pathlen = len(path) try: - index = path.index('%') + index = path.index("%") except ValueError: - res = res + '%' + path + res = res + "%" + path index = pathlen - 1 else: var = path[:index] if var in os.environ: res = res + os.environ[var] else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': + res = res + "%" + var + "%" + elif c == "$": # variable or '$$' + if path[index + 1 : index + 2] == "$": res = res + c index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] + elif path[index + 1 : index + 2] == "{": + path = path[index + 2 :] pathlen = len(path) try: - index = path.index('}') + index = path.index("}") var = path[:index] if var in os.environ: res = res + os.environ[var] else: - res = res + '${' + var + '}' + res = res + "${" + var + "}" except ValueError: - res = res + '${' + path + res = res + "${" + path index = pathlen - 1 else: - var = '' + var = "" index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: + c = path[index : index + 1] + while c != "" and c in varchars: var = var + c index = index + 1 - c = path[index:index + 1] + c = path[index : index + 1] if var in os.environ: res = res + os.environ[var] else: - res = res + '$' + var - if c != '': + res = res + "$" + var + if c != "": index = index - 1 else: res = res + c @@ -399,11 +448,12 @@ def expandvars(path): # Previously, this function also truncated pathnames to 8+3 format, # but as this module is called "ntpath", that's obviously wrong! + def normpath(path): """Normalize path, eliminating double slashes, etc.""" # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, str) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): + backslash, dot = ("\\", ".") if isinstance(path, str) else ("\\", ".") + if path.startswith(("\\\\.\\", "\\\\?\\")): # in the case of paths with these prefixes: # \\.\ -> device names # \\?\ -> literal paths @@ -420,7 +470,7 @@ def normpath(path): # letter. This means that the invalid filename \\\a\b is preserved # unchanged, where a\\\b is normalised to a\b. It's not clear that there # is any better behaviour for such edge cases. 
- if prefix == '': + if prefix == "": # No drive letter - preserve initial backslashes while path[:1] == "\\": prefix = prefix + backslash @@ -433,11 +483,11 @@ def normpath(path): comps = path.split("\\") i = 0 while i < len(comps): - if comps[i] in ('.', ''): + if comps[i] in (".", ""): del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] + elif comps[i] == "..": + if i > 0 and comps[i - 1] != "..": + del comps[i - 1 : i + 1] i -= 1 elif i == 0 and prefix.endswith("\\"): del comps[i] @@ -455,7 +505,8 @@ def normpath(path): try: from nt import _getfullpathname -except ImportError: # not running on Windows - mock up something sensible +except ImportError: # not running on Windows - mock up something sensible + def abspath(path): """Return the absolute version of a path.""" if not isabs(path): @@ -467,25 +518,29 @@ def abspath(path): return normpath(path) else: # use native Windows method on Windows + def abspath(path): """Return the absolute version of a path.""" - if path: # Empty path must return current working directory. + if path: # Empty path must return current working directory. try: path = _getfullpathname(path) except WindowsError: - pass # Bad path - return unchanged. + pass # Bad path - return unchanged. elif isinstance(path, str): path = os.getcwd() else: path = os.getcwd() return normpath(path) + # realpath is a no-op on systems without islink support realpath = abspath # Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) +supports_unicode_filenames = ( + hasattr(sys, "getwindowsversion") and sys.getwindowsversion()[3] >= 2 +) + def _abspath_split(path): abs = abspath(normpath(path)) @@ -495,6 +550,7 @@ def _abspath_split(path): prefix, rest = splitdrive(abs) return is_unc, prefix, [x for x in rest.split(sep) if x] + def relpath(path, start=curdir): """Return a relative version of a path""" @@ -505,15 +561,17 @@ def relpath(path, start=curdir): path_is_unc, path_prefix, path_list = _abspath_split(path) if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) + raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" % (path, start)) if path_prefix.lower() != start_prefix.lower(): if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) + raise ValueError( + "path is on UNC root %s, start on UNC root %s" + % (path_prefix, start_prefix) + ) else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) + raise ValueError( + "path is on drive %s, start on drive %s" % (path_prefix, start_prefix) + ) # Work out how much of the filepath is shared by start and path. i = 0 for e1, e2 in zip(start_list, path_list): @@ -521,11 +579,12 @@ def relpath(path, start=curdir): break i += 1 - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + rel_list = [pardir] * (len(start_list) - i) + path_list[i:] if not rel_list: return curdir return join(*rel_list) + try: # The genericpath.isdir implementation uses os.stat and checks the mode # attribute to tell whether or not the path is a directory. 
diff --git a/source/ftrack_api/accessor/base.py b/source/ftrack_api/accessor/base.py index b76e2a6a..32085a67 100644 --- a/source/ftrack_api/accessor/base.py +++ b/source/ftrack_api/accessor/base.py @@ -9,7 +9,7 @@ class Accessor(with_metaclass(abc.ABCMeta, object)): - '''Provide data access to a location. + """Provide data access to a location. A location represents a specific storage, but access to that storage may vary. For example, both local filesystem and FTP access may be possible for @@ -26,15 +26,15 @@ class Accessor(with_metaclass(abc.ABCMeta, object)): calling any of the accessor methods that accept a *resource_identifier* argument. - ''' + """ def __init__(self): - '''Initialise location accessor.''' + """Initialise location accessor.""" super(Accessor, self).__init__() @abc.abstractmethod def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. + """Return list of entries in *resource_identifier* container. Each entry in the returned list should be a valid resource identifier. @@ -43,82 +43,82 @@ def list(self, resource_identifier): :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if *resource_identifier* is not a container. - ''' + """ @abc.abstractmethod def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' + """Return if *resource_identifier* is valid and exists in location.""" @abc.abstractmethod def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' + """Return whether *resource_identifier* refers to a file.""" @abc.abstractmethod def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' + """Return whether *resource_identifier* refers to a container.""" @abc.abstractmethod def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' + """Return whether *resource_identifier* refers to a file sequence.""" @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' + def open(self, resource_identifier, mode="rb"): + """Return :class:`~ftrack_api.data.Data` for *resource_identifier*.""" @abc.abstractmethod def remove(self, resource_identifier): - '''Remove *resource_identifier*. + """Remove *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if *resource_identifier* does not exist. - ''' + """ @abc.abstractmethod def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. + """Make a container at *resource_identifier*. If *recursive* is True, also make any intermediate containers. Should silently ignore existing containers and not recreate them. - ''' + """ @abc.abstractmethod def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. + """Return resource_identifier of container for *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if container of *resource_identifier* could not be determined. - ''' + """ def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' + """Remove container at *resource_identifier*.""" return self.remove(resource_identifier) def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. 
+ """Return filesystem path for *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if filesystem path could not be determined from *resource_identifier* or :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if retrieving filesystem paths is not supported by this accessor. - ''' + """ raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier + "get_filesystem_path", resource_identifier=resource_identifier ) def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. + """Return URL for *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if URL could not be determined from *resource_identifier* or :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if retrieving URL is not supported by this accessor. - ''' + """ raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier + "get_url", resource_identifier=resource_identifier ) diff --git a/source/ftrack_api/accessor/disk.py b/source/ftrack_api/accessor/disk.py index 20a2d4b5..f0faec3c 100644 --- a/source/ftrack_api/accessor/disk.py +++ b/source/ftrack_api/accessor/disk.py @@ -17,25 +17,25 @@ AccessorPermissionDeniedError, AccessorResourceInvalidError, AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError + AccessorParentResourceNotFoundError, ) class DiskAccessor(ftrack_api.accessor.base.Accessor): - '''Provide disk access to a location. + """Provide disk access to a location. Expect resource identifiers to refer to relative filesystem paths. - ''' + """ def __init__(self, prefix, **kw): - '''Initialise location accessor. + """Initialise location accessor. *prefix* specifies the base folder for the disk based structure and will be prepended to any path. It should be specified in the syntax of the current OS. - ''' + """ if prefix: prefix = os.path.expanduser(os.path.expandvars(prefix)) prefix = os.path.abspath(prefix) @@ -44,7 +44,7 @@ def __init__(self, prefix, **kw): super(DiskAccessor, self).__init__(**kw) def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. + """Return list of entries in *resource_identifier* container. Each entry in the returned list should be a valid resource identifier. @@ -53,12 +53,10 @@ def list(self, resource_identifier): :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if *resource_identifier* is not a container. 
- ''' + """ filesystem_path = self.get_filesystem_path(resource_identifier) - with error_handler( - operation='list', resource_identifier=resource_identifier - ): + with error_handler(operation="list", resource_identifier=resource_identifier): listing = [] for entry in os.listdir(filesystem_path): listing.append(os.path.join(resource_identifier, entry)) @@ -66,71 +64,67 @@ def list(self, resource_identifier): return listing def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' + """Return if *resource_identifier* is valid and exists in location.""" filesystem_path = self.get_filesystem_path(resource_identifier) return os.path.exists(filesystem_path) def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' + """Return whether *resource_identifier* refers to a file.""" filesystem_path = self.get_filesystem_path(resource_identifier) return os.path.isfile(filesystem_path) def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' + """Return whether *resource_identifier* refers to a container.""" filesystem_path = self.get_filesystem_path(resource_identifier) return os.path.isdir(filesystem_path) def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise AccessorUnsupportedOperationError(operation='is_sequence') + """Return whether *resource_identifier* refers to a file sequence.""" + raise AccessorUnsupportedOperationError(operation="is_sequence") - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' + def open(self, resource_identifier, mode="rb"): + """Return :class:`~ftrack_api.Data` for *resource_identifier*.""" filesystem_path = self.get_filesystem_path(resource_identifier) - with error_handler( - operation='open', resource_identifier=resource_identifier - ): + with error_handler(operation="open", resource_identifier=resource_identifier): data = ftrack_api.data.File(filesystem_path, mode) return data def remove(self, resource_identifier): - '''Remove *resource_identifier*. + """Remove *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if *resource_identifier* does not exist. - ''' + """ filesystem_path = self.get_filesystem_path(resource_identifier) if self.is_file(resource_identifier): with error_handler( - operation='remove', resource_identifier=resource_identifier + operation="remove", resource_identifier=resource_identifier ): os.remove(filesystem_path) elif self.is_container(resource_identifier): with error_handler( - operation='remove', resource_identifier=resource_identifier + operation="remove", resource_identifier=resource_identifier ): os.rmdir(filesystem_path) else: - raise AccessorResourceNotFoundError( - resource_identifier=resource_identifier - ) + raise AccessorResourceNotFoundError(resource_identifier=resource_identifier) def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. + """Make a container at *resource_identifier*. If *recursive* is True, also make any intermediate containers. 
- ''' + """ filesystem_path = self.get_filesystem_path(resource_identifier) with error_handler( - operation='makeContainer', resource_identifier=resource_identifier + operation="makeContainer", resource_identifier=resource_identifier ): try: if recursive: @@ -151,12 +145,12 @@ def make_container(self, resource_identifier, recursive=True): raise def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. + """Return resource_identifier of container for *resource_identifier*. Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if container of *resource_identifier* could not be determined. - ''' + """ filesystem_path = self.get_filesystem_path(resource_identifier) container = os.path.dirname(filesystem_path) @@ -165,24 +159,24 @@ def get_container(self, resource_identifier): if not container.startswith(self.prefix): raise AccessorParentResourceNotFoundError( resource_identifier=resource_identifier, - message='Could not determine container for ' - '{resource_identifier} as container falls outside ' - 'of configured prefix.' + message="Could not determine container for " + "{resource_identifier} as container falls outside " + "of configured prefix.", ) # Convert container filesystem path into resource identifier. - container = container[len(self.prefix):] + container = container[len(self.prefix) :] if ntpath.isabs(container): # Ensure that resulting path is relative by stripping any # leftover prefixed slashes from string. # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the # result will be 'foo/bar'. - container = container.lstrip('\\/') + container = container.lstrip("\\/") return container def get_filesystem_path(self, resource_identifier): - '''Return filesystem path for *resource_identifier*. + """Return filesystem path for *resource_identifier*. For example:: @@ -195,7 +189,7 @@ def get_filesystem_path(self, resource_identifier): Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem path could not be determined from *resource_identifier*. - ''' + """ filesystem_path = resource_identifier if filesystem_path: filesystem_path = os.path.normpath(filesystem_path) @@ -209,9 +203,9 @@ def get_filesystem_path(self, resource_identifier): if not filesystem_path.startswith(self.prefix): raise AccessorFilesystemPathError( resource_identifier=resource_identifier, - message='Could not determine access path for ' - 'resource_identifier outside of configured prefix: ' - '{resource_identifier}.' 
+ message="Could not determine access path for " + "resource_identifier outside of configured prefix: " + "{resource_identifier}.", ) return filesystem_path @@ -219,16 +213,15 @@ def get_filesystem_path(self, resource_identifier): @contextlib.contextmanager def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' + """Conform raised OSError/IOError exception to appropriate FTrack error.""" try: yield except (OSError, IOError) as error: (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - + kw.setdefault("error", error) - error_code = getattr(error, 'errno') + error_code = getattr(error, "errno") if not error_code: raise AccessorOperationFailedError(**kw) diff --git a/source/ftrack_api/accessor/server.py b/source/ftrack_api/accessor/server.py index 0fc5f43f..da6b0262 100644 --- a/source/ftrack_api/accessor/server.py +++ b/source/ftrack_api/accessor/server.py @@ -15,10 +15,10 @@ class ServerFile(String): - '''Representation of a server file.''' + """Representation of a server file.""" - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' + def __init__(self, resource_identifier, session, mode="rb"): + """Initialise file.""" self.mode = mode self.resource_identifier = resource_identifier self._session = session @@ -27,14 +27,14 @@ def __init__(self, resource_identifier, session, mode='rb'): super(ServerFile, self).__init__() def flush(self): - '''Flush all changes.''' + """Flush all changes.""" super(ServerFile, self).flush() - if self.mode == 'wb': + if self.mode == "wb": self._write() def read(self, limit=None): - '''Read file.''' + """Read file.""" if not self._has_read: self._read() self._has_read = True @@ -42,25 +42,25 @@ def read(self, limit=None): return super(ServerFile, self).read(limit) def _read(self): - '''Read all remote content from key into wrapped_file.''' + """Read all remote content from key into wrapped_file.""" position = self.tell() self.seek(0) response = requests.get( - '{0}/component/get'.format(self._session.server_url), + "{0}/component/get".format(self._session.server_url), params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key + "id": self.resource_identifier, + "username": self._session.api_user, + "apiKey": self._session.api_key, }, - stream=True + stream=True, ) try: response.raise_for_status() except requests.exceptions.HTTPError as error: raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) + "Failed to read data: {0}.".format(error) ) for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): @@ -70,37 +70,34 @@ def _read(self): self.seek(position) def _write(self): - '''Write current data to remote key.''' + """Write current data to remote key.""" position = self.tell() self.seek(0) # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) + component = self._session.get("FileComponent", self.resource_identifier) if not component: raise ftrack_api.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( + "Unable to retrieve component with id: {0}.".format( self.resource_identifier ) ) # Construct a name from component name and file_type. 
- name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) + name = component["name"] + if component["file_type"]: + name = "{0}.{1}".format(name, component["file_type"].lstrip(".")) try: metadata = self._session.get_upload_metadata( component_id=self.resource_identifier, file_name=name, file_size=self._get_size(), - checksum=self._compute_checksum() + checksum=self._compute_checksum(), ) except Exception as error: raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) + "Failed to get put metadata: {0}.".format(error) ) # Ensure at beginning of file before put. @@ -108,22 +105,20 @@ def _write(self): # Put the file based on the metadata. response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] + metadata["url"], data=self.wrapped_file, headers=metadata["headers"] ) try: response.raise_for_status() except requests.exceptions.HTTPError as error: raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) + "Failed to put file to server: {0}.".format(error) ) self.seek(position) def _get_size(self): - '''Return size of file in bytes.''' + """Return size of file in bytes.""" position = self.tell() self.seek(0, os.SEEK_END) length = self.tell() @@ -131,7 +126,7 @@ def _get_size(self): return length def _compute_checksum(self): - '''Return checksum for file.''' + """Return checksum for file.""" fp = self.wrapped_file buf_size = ftrack_api.symbol.CHUNK_SIZE hash_obj = hashlib.md5() @@ -142,8 +137,8 @@ def _compute_checksum(self): hash_obj.update(s) s = fp.read(buf_size) - base64_digest = base64.encodebytes(hash_obj.digest()).decode('utf-8') - if base64_digest[-1] == '\n': + base64_digest = base64.encodebytes(hash_obj.digest()).decode("utf-8") + if base64_digest[-1] == "\n": base64_digest = base64_digest[0:-1] fp.seek(spos) @@ -151,90 +146,88 @@ def _compute_checksum(self): class _ServerAccessor(Accessor): - '''Provide server location access.''' + """Provide server location access.""" def __init__(self, session, **kw): - '''Initialise location accessor.''' + """Initialise location accessor.""" super(_ServerAccessor, self).__init__(**kw) self._session = session - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' + def open(self, resource_identifier, mode="rb"): + """Return :py:class:`~ftrack_api.Data` for *resource_identifier*.""" return ServerFile(resource_identifier, session=self._session, mode=mode) def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' + """Remove *resourceIdentifier*.""" response = requests.get( - '{0}/component/remove'.format(self._session.server_url), + "{0}/component/remove".format(self._session.server_url), params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } + "id": resourceIdentifier, + "username": self._session.api_user, + "apiKey": self._session.api_key, + }, ) if response.status_code != 200: raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to remove file.' + "Failed to remove file." 
) def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' + """Return resource_identifier of container for *resource_identifier*.""" return None def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' + """Make a container at *resource_identifier*.""" def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' + """Return list of entries in *resource_identifier* container.""" raise NotImplementedError() def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' + """Return if *resource_identifier* is valid and exists in location.""" return False def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' + """Return whether *resource_identifier* refers to a file.""" raise NotImplementedError() def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' + """Return whether *resource_identifier* refers to a container.""" raise NotImplementedError() def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' + """Return whether *resource_identifier* refers to a file sequence.""" raise NotImplementedError() def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' + """Return url for *resource_identifier*.""" url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' + "{url}/component/get?id={id}&username={username}" "&apiKey={apiKey}" ) return url_string.format( url=self._session.server_url, id=resource_identifier, username=self._session.api_user, - apiKey=self._session.api_key + apiKey=self._session.api_key, ) def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. + """Return thumbnail url for *resource_identifier*. Optionally, specify *size* to constrain the downscaled image to size x size pixels. - ''' + """ url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' + "{url}/component/thumbnail?id={id}&username={username}" "&apiKey={apiKey}" ) url = url_string.format( url=self._session.server_url, id=resource_identifier, username=self._session.api_user, - apiKey=self._session.api_key + apiKey=self._session.api_key, ) if size: - url += u'&size={0}'.format(size) + url += "&size={0}".format(size) return url diff --git a/source/ftrack_api/attribute.py b/source/ftrack_api/attribute.py index c997d5f2..4a8fdbab 100644 --- a/source/ftrack_api/attribute.py +++ b/source/ftrack_api/attribute.py @@ -16,27 +16,23 @@ import ftrack_api.inspection import ftrack_api.operation -logger = logging.getLogger( - __name__ -) +logger = logging.getLogger(__name__) def merge_references(function): - '''Decorator to handle merging of references / collections.''' + """Decorator to handle merging of references / collections.""" @functools.wraps(function) def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' + """Merge the attribute with the local cache.""" if attribute.name not in entity._inflated: # Only merge on first access to avoid # inflating them multiple times. 
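            # Note: entity._inflated is a per-entity set of attribute names
            # whose references have already been merged. The attribute name is
            # added once merging completes (see below), so this guard runs the
            # merge at most once per attribute.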
logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) + "Merging potential new data into attached " + "entity for attribute {0}.".format(attribute.name) ) # Local attributes. @@ -46,16 +42,12 @@ def get_value(attribute, entity): ( ftrack_api.entity.base.Entity, ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) + ftrack_api.collection.MappedCollectionProxy, + ), ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) + logger.debug("Merging local value for attribute {0}.".format(attribute)) - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) + merged_local_value = entity.session._merge(local_value, merged=dict()) if merged_local_value is not local_value: with entity.session.operation_recording(False): @@ -68,33 +60,27 @@ def get_value(attribute, entity): ( ftrack_api.entity.base.Entity, ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) + ftrack_api.collection.MappedCollectionProxy, + ), ): logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) + "Merging remote value for attribute {0}.".format(attribute) ) - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) + merged_remote_value = entity.session._merge(remote_value, merged=dict()) if merged_remote_value is not remote_value: attribute.set_remote_value(entity, merged_remote_value) - entity._inflated.add( - attribute.name - ) + entity._inflated.add(attribute.name) - return function( - attribute, entity - ) + return function(attribute, entity) return get_value class Attributes(object): - '''Collection of properties accessible by name.''' + """Collection of properties accessible by name.""" def __init__(self, attributes=None): super(Attributes, self).__init__() @@ -104,56 +90,60 @@ def __init__(self, attributes=None): self.add(attribute) def add(self, attribute): - '''Add *attribute*.''' + """Add *attribute*.""" existing = self._data.get(attribute.name, None) if existing: raise ftrack_api.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) + "Attribute with name {0} already added as {1}".format( + attribute.name, existing + ) ) self._data[attribute.name] = attribute def remove(self, attribute): - '''Remove attribute.''' + """Remove attribute.""" self._data.pop(attribute.name) def get(self, name): - '''Return attribute by *name*. + """Return attribute by *name*. If no attribute matches *name* then return None. 
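        A quick sketch (attribute names illustrative)::

            attributes = Attributes([Attribute('bar')])
            assert attributes.get('bar') is not None
            assert attributes.get('missing') is None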
- ''' + """ return self._data.get(name, None) def keys(self): - '''Return list of attribute names.''' + """Return list of attribute names.""" return list(self._data.keys()) def __contains__(self, item): - '''Return whether *item* present.''' + """Return whether *item* present.""" if not isinstance(item, Attribute): return False return item.name in self._data def __iter__(self): - '''Return iterator over attributes.''' + """Return iterator over attributes.""" return iter(self._data.values()) def __len__(self): - '''Return count of attributes.''' + """Return count of attributes.""" return len(self._data) class Attribute(object): - '''A name and value pair persisted remotely.''' + """A name and value pair persisted remotely.""" def __init__( - self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, - computed=False + self, + name, + default_value=ftrack_api.symbol.NOT_SET, + mutable=True, + computed=False, ): - '''Initialise attribute with *name*. + """Initialise attribute with *name*. *default_value* represents the default value for the attribute. It may be a callable. It is not used within the attribute when providing @@ -167,35 +157,31 @@ def __init__( If *computed* is set to True the value is a remote side computed value and should not be long-term cached. - ''' + """ super(Attribute, self).__init__() self._name = name self._mutable = mutable self._computed = computed self.default_value = default_value - self._local_key = 'local' - self._remote_key = 'remote' + self._local_key = "local" + self._remote_key = "remote" def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) + """Return representation of entity.""" + return "<{0}.{1}({2}) object at {3}>".format( + self.__module__, self.__class__.__name__, self.name, id(self) ) def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' + """Return attribute storage on *entity* creating if missing.""" + storage_key = "_ftrack_attribute_storage" storage = getattr(entity, storage_key, None) if storage is None: storage = collections.defaultdict( - lambda: - { + lambda: { self._local_key: ftrack_api.symbol.NOT_SET, - self._remote_key: ftrack_api.symbol.NOT_SET + self._remote_key: ftrack_api.symbol.NOT_SET, } ) setattr(entity, storage_key, storage) @@ -204,27 +190,27 @@ def get_entity_storage(self, entity): @property def name(self): - '''Return name.''' + """Return name.""" return self._name @property def mutable(self): - '''Return whether attribute is mutable.''' + """Return whether attribute is mutable.""" return self._mutable @property def computed(self): - '''Return whether attribute is computed.''' + """Return whether attribute is computed.""" return self._computed def get_value(self, entity): - '''Return current value for *entity*. + """Return current value for *entity*. If a value was set locally then return it, otherwise return last known remote value. If no remote value yet retrieved, make a request for it via the session and block until available. 
- ''' + """ value = self.get_local_value(entity) if value is not ftrack_api.symbol.NOT_SET: return value @@ -240,23 +226,23 @@ def get_value(self, entity): return self.get_remote_value(entity) def get_local_value(self, entity): - '''Return locally set value for *entity*.''' + """Return locally set value for *entity*.""" storage = self.get_entity_storage(entity) return storage[self.name][self._local_key] def get_remote_value(self, entity): - '''Return remote value for *entity*. + """Return remote value for *entity*. .. note:: Only return locally stored remote value, do not fetch from remote. - ''' + """ storage = self.get_entity_storage(entity) return storage[self.name][self._remote_key] def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' + """Set local *value* for *entity*.""" if ( not self.mutable and self.is_set(entity) @@ -277,94 +263,95 @@ def set_local_value(self, entity, value): ftrack_api.inspection.primary_key(entity), self.name, old_value, - value + value, ) ) def set_remote_value(self, entity, value): - '''Set remote *value*. + """Set remote *value*. .. note:: Only set locally stored remote value, do not persist to remote. - ''' + """ storage = self.get_entity_storage(entity) storage[self.name][self._remote_key] = value def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' + """Populate remote value for *entity*.""" entity.session.populate([entity], self.name) def is_modified(self, entity): - '''Return whether local value set and differs from remote. + """Return whether local value set and differs from remote. .. note:: Will not fetch remote value so may report True even when values are the same on the remote. - ''' + """ local_value = self.get_local_value(entity) remote_value = self.get_remote_value(entity) return ( - local_value is not ftrack_api.symbol.NOT_SET - and local_value != remote_value + local_value is not ftrack_api.symbol.NOT_SET and local_value != remote_value ) def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET - ]) + """Return whether a value is set for *entity*.""" + return any( + [ + self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, + self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET, + ] + ) class ScalarAttribute(Attribute): - '''Represent a scalar value.''' + """Represent a scalar value.""" def __init__(self, name, data_type, **kw): - '''Initialise property.''' + """Initialise property.""" super(ScalarAttribute, self).__init__(name, **kw) self.data_type = data_type class ReferenceAttribute(Attribute): - '''Reference another entity.''' + """Reference another entity.""" def __init__(self, name, entity_type, **kw): - '''Initialise property.''' + """Initialise property.""" super(ReferenceAttribute, self).__init__(name, **kw) self.entity_type = entity_type def populate_remote_value(self, entity): - '''Populate remote value for *entity*. + """Populate remote value for *entity*. As attribute references another entity, use that entity's configured default projections to auto populate useful attributes when loading. 
- ''' + """ reference_entity_type = entity.session.types[self.entity_type] default_projections = reference_entity_type.default_projections projections = [] if default_projections: for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) + projections.append("{0}.{1}".format(self.name, projection)) else: projections.append(self.name) - entity.session.populate([entity], ', '.join(projections)) + entity.session.populate([entity], ", ".join(projections)) def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. + """Return whether a local value has been set and differs from remote. .. note:: Will not fetch remote value so may report True even when values are the same on the remote. - ''' + """ local_value = self.get_local_value(entity) remote_value = self.get_remote_value(entity) @@ -374,30 +361,27 @@ def is_modified(self, entity): if remote_value is ftrack_api.symbol.NOT_SET: return True - if ( - ftrack_api.inspection.identity(local_value) - != ftrack_api.inspection.identity(remote_value) - ): + if ftrack_api.inspection.identity( + local_value + ) != ftrack_api.inspection.identity(remote_value): return True return False - @merge_references def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) + return super(ReferenceAttribute, self).get_value(entity) + class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' + """Base class for collection attributes.""" #: Collection class used by attribute. collection_class = None @merge_references def get_value(self, entity): - '''Return current value for *entity*. + """Return current value for *entity*. If a value was set locally then return it, otherwise return last known remote value. If no remote value yet retrieved, make a request for it @@ -409,16 +393,15 @@ def get_value(self, entity): value into the local value on access if no local value currently set. - ''' + """ super(AbstractCollectionAttribute, self).get_value(entity) # Conditionally, copy remote value into local value so that it can be # mutated without side effects. local_value = self.get_local_value(entity) remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) + if local_value is ftrack_api.symbol.NOT_SET and isinstance( + remote_value, self.collection_class ): try: with entity.session.operation_recording(False): @@ -440,7 +423,7 @@ def get_value(self, entity): self.set_local_value( entity, # None should be treated as empty collection. - None + None, ) except ftrack_api.exception.ImmutableAttributeError: pass @@ -448,7 +431,7 @@ def get_value(self, entity): return self.get_local_value(entity) def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' + """Set local *value* for *entity*.""" if value is not ftrack_api.symbol.NOT_SET: value = self._adapt_to_collection(entity, value) value.mutable = self.mutable @@ -456,13 +439,13 @@ def set_local_value(self, entity, value): super(AbstractCollectionAttribute, self).set_local_value(entity, value) def set_remote_value(self, entity, value): - '''Set remote *value*. + """Set remote *value*. .. note:: Only set locally stored remote value, do not persist to remote. 
- ''' + """ if value is not ftrack_api.symbol.NOT_SET: value = self._adapt_to_collection(entity, value) value.mutable = False @@ -470,59 +453,54 @@ def set_remote_value(self, entity, value): super(AbstractCollectionAttribute, self).set_remote_value(entity, value) def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. + """Adapt *value* to appropriate collection instance for *entity*. .. note:: If *value* is None then return a suitable empty collection. - ''' + """ raise NotImplementedError() class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' + """Represent a collection of other entities.""" #: Collection class used by attribute. collection_class = ftrack_api.collection.Collection def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' + """Adapt *value* to a Collection instance on *entity*.""" if not isinstance(value, ftrack_api.collection.Collection): - if value is None: value = ftrack_api.collection.Collection(entity, self) elif isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) + value = ftrack_api.collection.Collection(entity, self, data=value) else: raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) + "Cannot convert {0!r} to collection.".format(value) ) else: if value.attribute is not self: raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute' + "Collection already bound to a different attribute" ) return value class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' + """Represent a mapped key, value collection of entities.""" #: Collection class used by attribute. collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. + def __init__(self, name, creator, key_attribute, value_attribute, **kw): + """Initialise attribute with *name*. *creator* should be a function that accepts a dictionary of data and is used by the referenced collection to create new entities in the @@ -534,7 +512,7 @@ def __init__( *value_attribute* should be the name of the attribute on an entity in the collection that represents the value for 'value' of the dictionary. 
- ''' + """ self.creator = creator self.key_attribute = key_attribute self.value_attribute = value_attribute @@ -542,28 +520,22 @@ def __init__( super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.KeyValueMappedCollectionProxy - ): - + """Adapt *value* to an *entity*.""" + if not isinstance(value, ftrack_api.collection.KeyValueMappedCollectionProxy): if value is None: value = ftrack_api.collection.KeyValueMappedCollectionProxy( ftrack_api.collection.Collection(entity, self), - self.creator, self.key_attribute, - self.value_attribute + self.creator, + self.key_attribute, + self.value_attribute, ) elif isinstance(value, (list, ftrack_api.collection.Collection)): - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) + value = ftrack_api.collection.Collection(entity, self, data=value) value = ftrack_api.collection.KeyValueMappedCollectionProxy( - value, self.creator, self.key_attribute, - self.value_attribute + value, self.creator, self.key_attribute, self.value_attribute ) elif isinstance(value, collections_abc.Mapping): @@ -576,12 +548,11 @@ def _adapt_to_collection(self, entity, value): # values should be mapped collections already. current_value = self.get_value(entity) if not isinstance( - current_value, - ftrack_api.collection.KeyValueMappedCollectionProxy + current_value, ftrack_api.collection.KeyValueMappedCollectionProxy ): raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a KeyValueMappedCollectionProxy.' + "Cannot adapt mapping to collection as current value " + "type is not a KeyValueMappedCollectionProxy." ) # Create the new collection using the existing collection as @@ -591,11 +562,8 @@ def _adapt_to_collection(self, entity, value): collection = ftrack_api.collection.Collection( entity, self, data=current_value.collection[:] ) - collection_proxy = ( - ftrack_api.collection.KeyValueMappedCollectionProxy( - collection, self.creator, - self.key_attribute, self.value_attribute - ) + collection_proxy = ftrack_api.collection.KeyValueMappedCollectionProxy( + collection, self.creator, self.key_attribute, self.value_attribute ) # Remove expired keys from collection. @@ -611,48 +579,38 @@ def _adapt_to_collection(self, entity, value): else: raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) + "Cannot convert {0!r} to collection.".format(value) ) else: if value.attribute is not self: raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' + "Collection already bound to a different attribute." ) return value class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped custom attribute collection of entities.''' + """Represent a mapped custom attribute collection of entities.""" #: Collection class used by attribute. 
- collection_class = ( - ftrack_api.collection.CustomAttributeCollectionProxy - ) + collection_class = ftrack_api.collection.CustomAttributeCollectionProxy def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.CustomAttributeCollectionProxy - ): - + """Adapt *value* to an *entity*.""" + if not isinstance(value, ftrack_api.collection.CustomAttributeCollectionProxy): if value is None: value = ftrack_api.collection.CustomAttributeCollectionProxy( ftrack_api.collection.Collection(entity, self) ) elif isinstance(value, (list, ftrack_api.collection.Collection)): - # Why are we creating a new if it is a list? This will cause # any merge to create a new proxy and collection. if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) + value = ftrack_api.collection.Collection(entity, self, data=value) - value = ftrack_api.collection.CustomAttributeCollectionProxy( - value - ) + value = ftrack_api.collection.CustomAttributeCollectionProxy(value) elif isinstance(value, collections_abc.Mapping): # Convert mapping. @@ -664,12 +622,11 @@ def _adapt_to_collection(self, entity, value): # values should be mapped collections already. current_value = self.get_value(entity) if not isinstance( - current_value, - ftrack_api.collection.CustomAttributeCollectionProxy + current_value, ftrack_api.collection.CustomAttributeCollectionProxy ): raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' + "Cannot adapt mapping to collection as current value " + "type is not a MappedCollectionProxy." ) # Create the new collection using the existing collection as @@ -679,10 +636,8 @@ def _adapt_to_collection(self, entity, value): collection = ftrack_api.collection.Collection( entity, self, data=current_value.collection[:] ) - collection_proxy = ( - ftrack_api.collection.CustomAttributeCollectionProxy( - collection - ) + collection_proxy = ftrack_api.collection.CustomAttributeCollectionProxy( + collection ) # Remove expired keys from collection. @@ -698,12 +653,12 @@ def _adapt_to_collection(self, entity, value): else: raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) + "Cannot convert {0!r} to collection.".format(value) ) else: if value.attribute is not self: raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' + "Collection already bound to a different attribute." ) return value diff --git a/source/ftrack_api/cache.py b/source/ftrack_api/cache.py index ed74d68b..2ea7ebe4 100644 --- a/source/ftrack_api/cache.py +++ b/source/ftrack_api/cache.py @@ -1,7 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2014 ftrack -'''Caching framework. +"""Caching framework. Defines a standardised :class:`Cache` interface for storing data against specific keys. Key generation is also standardised using a :class:`KeyMaker` @@ -13,7 +13,7 @@ As a convenience a simple :func:`memoise` decorator is included for quick memoisation of function using a global cache and standard key maker. -''' +""" from builtins import str from six import string_types @@ -52,46 +52,46 @@ class Cache(with_metaclass(abc.ABCMeta, object)): - '''Cache interface. + """Cache interface. Derive from this to define concrete cache implementations. A cache is centered around the concept of key:value pairings where the key is unique across the cache. 
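    A minimal concrete implementation, essentially mirroring the
    :class:`MemoryCache` defined below (sketch)::

        class DictCache(Cache):
            def __init__(self):
                self._data = {}

            def get(self, key):
                return self._data[key]  # Raises KeyError when missing.

            def set(self, key, value):
                self._data[key] = value

            def remove(self, key):
                return self._data.pop(key)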
- ''' + """ @abc.abstractmethod def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ @abc.abstractmethod def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" @abc.abstractmethod def remove(self, key): - '''Remove *key* and return stored value. + """Remove *key* and return stored value. Raise :exc:`KeyError` if *key* not found. - ''' + """ def keys(self): - '''Return list of keys at this current time. + """Return list of keys at this current time. .. warning:: Actual keys may differ from those returned due to timing of access. - ''' + """ raise NotImplementedError() # pragma: no cover def values(self): - '''Return values for current keys.''' + """Return values for current keys.""" values = [] for key in list(self.keys()): try: @@ -104,13 +104,13 @@ def values(self): return values def clear(self, pattern=None): - '''Remove all keys matching *pattern*. + """Remove all keys matching *pattern*. *pattern* should be a regular expression string. If *pattern* is None then all keys will be removed. - ''' + """ if pattern is not None: pattern = re.compile(pattern) @@ -127,54 +127,54 @@ def clear(self, pattern=None): class ProxyCache(Cache): - '''Proxy another cache.''' + """Proxy another cache.""" def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' + """Initialise cache with *proxied* cache instance.""" self.proxied = proxied super(ProxyCache, self).__init__() def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ return self.proxied.get(key) def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" return self.proxied.set(key, value) def remove(self, key): - '''Remove *key* and return stored value. + """Remove *key* and return stored value. Raise :exc:`KeyError` if *key* not found. - ''' + """ return self.proxied.remove(key) def keys(self): - '''Return list of keys at this current time. + """Return list of keys at this current time. .. warning:: Actual keys may differ from those returned due to timing of access. - ''' + """ return list(self.proxied.keys()) class LayeredCache(Cache): - '''Layered cache.''' + """Layered cache.""" def __init__(self, caches): - '''Initialise cache with *caches*.''' + """Initialise cache with *caches*.""" super(LayeredCache, self).__init__() self.caches = caches def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. @@ -182,7 +182,7 @@ def get(self, key): If value retrieved, then also set the value in each higher level cache up from where retrieved. - ''' + """ target_caches = [] value = ftrack_api.symbol.NOT_SET @@ -205,16 +205,16 @@ def get(self, key): return value def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" for cache in self.caches: cache.set(key, value) def remove(self, key): - '''Remove *key*. + """Remove *key*. Raise :exc:`KeyError` if *key* not found in any layer. - ''' + """ removed = False for cache in self.caches: try: @@ -228,13 +228,13 @@ def remove(self, key): raise KeyError(key) def keys(self): - '''Return list of keys at this current time. + """Return list of keys at this current time. .. warning:: Actual keys may differ from those returned due to timing of access. 
- ''' + """ keys = [] for cache in self.caches: keys.extend(list(cache.keys())) @@ -243,127 +243,127 @@ def keys(self): class MemoryCache(Cache): - '''Memory based cache.''' + """Memory based cache.""" def __init__(self): - '''Initialise cache.''' + """Initialise cache.""" self._cache = {} super(MemoryCache, self).__init__() def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ return self._cache[key] def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" self._cache[key] = value def remove(self, key): - '''Remove *key*. + """Remove *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ del self._cache[key] def keys(self): - '''Return list of keys at this current time. + """Return list of keys at this current time. .. warning:: Actual keys may differ from those returned due to timing of access. - ''' + """ return list(self._cache.keys()) class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. + """File based cache that uses :mod:`anydbm` module. .. note:: No locking of the underlying file is performed. - ''' + """ def __init__(self, path): - '''Initialise cache at *path*.''' + """Initialise cache at *path*.""" self.path = path # Initialise cache. - cache = anydbm.open(self.path, 'c') + cache = anydbm.open(self.path, "c") cache.close() super(FileCache, self).__init__() @contextlib.contextmanager def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') + """Yield opened database file.""" + cache = anydbm.open(self.path, "w") try: yield cache finally: cache.close() def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ with self._database() as cache: - return cache[key.encode('ascii')].decode('utf-8') + return cache[key.encode("ascii")].decode("utf-8") def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" with self._database() as cache: - cache[key.encode('ascii')] = value + cache[key.encode("ascii")] = value def remove(self, key): - '''Remove *key*. + """Remove *key*. Raise :exc:`KeyError` if *key* not found. - ''' + """ with self._database() as cache: - del cache[key.encode('ascii')] + del cache[key.encode("ascii")] def keys(self): - '''Return list of keys at this current time. + """Return list of keys at this current time. .. warning:: Actual keys may differ from those returned due to timing of access. - ''' + """ with self._database() as cache: - return [s.decode('utf-8') for s in cache.keys()] - #return list(map(str, cache.keys())) + return [s.decode("utf-8") for s in cache.keys()] + # return list(map(str, cache.keys())) class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' + """Proxied cache that stores values as serialised data.""" def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. + """Initialise cache with *encode* and *decode* callables. *proxied* is the underlying cache to use for storage. - ''' + """ self.encode = encode self.decode = decode super(SerialisedCache, self).__init__(proxied) def get(self, key): - '''Return value for *key*. + """Return value for *key*. Raise :exc:`KeyError` if *key* not found. 
- ''' + """ value = super(SerialisedCache, self).get(key) if self.decode: value = self.decode(value) @@ -371,7 +371,7 @@ def get(self, key): return value def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" if self.encode: value = self.encode(value) @@ -379,15 +379,15 @@ def set(self, key, value): class KeyMaker(with_metaclass(abc.ABCMeta, object)): - '''Generate unique keys.''' + """Generate unique keys.""" def __init__(self): - '''Initialise key maker.''' + """Initialise key maker.""" super(KeyMaker, self).__init__() - self.item_separator = '' + self.item_separator = "" def key(self, *items): - '''Return key for *items*.''' + """Return key for *items*.""" keys = [] for item in items: keys.append(self._key(item)) @@ -396,34 +396,34 @@ def key(self, *items): @abc.abstractmethod def _key(self, obj): - '''Return key for *obj*.''' + """Return key for *obj*.""" class StringKeyMaker(KeyMaker): - '''Generate string key.''' + """Generate string key.""" def _key(self, obj): - '''Return key for *obj*.''' + """Return key for *obj*.""" return str(obj) class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' + """Generate unique keys for objects.""" def __init__(self): - '''Initialise key maker.''' + """Initialise key maker.""" super(ObjectKeyMaker, self).__init__() - self.item_separator = b'\0' - self.mapping_identifier = b'\1' - self.mapping_pair_separator = b'\2' - self.iterable_identifier = b'\3' - self.name_identifier = b'\4' + self.item_separator = b"\0" + self.mapping_identifier = b"\1" + self.mapping_pair_separator = b"\2" + self.iterable_identifier = b"\3" + self.name_identifier = b"\4" def _key(self, item): return self.__key(item) def __key(self, item): - '''Return key for *item*. + """Return key for *item*. Returned key will be a pickle like string representing the *item*. This allows for typically non-hashable objects to be used in key generation @@ -447,7 +447,7 @@ def __key(self, item): >>> key_maker.key(add, (1, 3)) '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - ''' + """ # Ensure p3k uses a protocol available in py2 so can decode it. pickle_protocol = 2 @@ -455,53 +455,47 @@ def __key(self, item): # TODO: Consider using a more robust and comprehensive solution such as # dill (https://github.com/uqfoundation/dill). 
if isinstance(item, collections_abc.Iterable): - if isinstance(item, string_types): return pickle.dumps(item, pickle_protocol) if isinstance(item, collections_abc.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - - return ( - self.mapping_identifier + - contents + - self.mapping_identifier + contents = self.item_separator.join( + [ + ( + self._key(key) + + self.mapping_pair_separator + + self._key(value) + ) + for key, value in sorted(item.items()) + ] ) + + return self.mapping_identifier + contents + self.mapping_identifier else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) + contents = self.item_separator.join([self._key(item) for item in item]) + return self.iterable_identifier + contents + self.iterable_identifier elif inspect.ismethod(item): - - return b''.join(( - self.name_identifier, - item.__name__.encode(), - self.item_separator, - item.__self__.__class__.__name__.encode(), - self.item_separator, - item.__module__.encode() - )) + return b"".join( + ( + self.name_identifier, + item.__name__.encode(), + self.item_separator, + item.__self__.__class__.__name__.encode(), + self.item_separator, + item.__module__.encode(), + ) + ) elif inspect.isfunction(item) or inspect.isclass(item): - return b''.join(( - self.name_identifier, - item.__name__.encode(), - self.item_separator, - item.__module__.encode() - )) + return b"".join( + ( + self.name_identifier, + item.__name__.encode(), + self.item_separator, + item.__module__.encode(), + ) + ) elif inspect.isbuiltin(item): return self.name_identifier + item.__name__.encode() @@ -511,7 +505,7 @@ def __key(self, item): class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. + """Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. Example:: @@ -527,10 +521,10 @@ class Memoiser(object): >>> memoiser.call(add, (1, 3), {}) Called - ''' + """ def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. + """Initialise with *cache* and *key_maker* to use. If *cache* is not specified a default :class:`MemoryCache` will be used. Similarly, if *key_maker* is not specified a default @@ -539,7 +533,7 @@ def __init__(self, cache=None, key_maker=None, return_copies=True): If *return_copies* is True then all results returned from the cache will be deep copies to avoid indirect mutation of cached values. - ''' + """ self.cache = cache if self.cache is None: self.cache = MemoryCache() @@ -552,14 +546,14 @@ def __init__(self, cache=None, key_maker=None, return_copies=True): super(Memoiser, self).__init__() def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. + """Call *function* with *args* and *kw* and return result. If *function* was previously called with exactly the same arguments then return cached result if available. Store result for call in cache. 
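        Continuing the class level example above (sketch)::

            >>> memoiser.call(add, (1, 3), {})
            Called
            >>> memoiser.call(add, (1, 3), {})

        The second call prints nothing as the result is returned from the
        cache rather than by invoking ``add`` again.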
- ''' + """ if args is None: args = () @@ -586,9 +580,9 @@ def call(self, function, args=None, kw=None): def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): + """Decorator to memoise function calls using *memoiser*.""" + def outer(function): @functools.wraps(function) def inner(*args, **kw): return memoiser.call(function, args, kw) diff --git a/source/ftrack_api/collection.py b/source/ftrack_api/collection.py index a27bffb6..1e77f906 100644 --- a/source/ftrack_api/collection.py +++ b/source/ftrack_api/collection.py @@ -20,10 +20,10 @@ class Collection(collections_abc.MutableSequence): - '''A collection of entities.''' + """A collection of entities.""" def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' + """Initialise collection.""" self.entity = entity self.attribute = attribute self._data = [] @@ -45,18 +45,18 @@ def __init__(self, entity, attribute, mutable=True, data=None): self.mutable = mutable def _identity_key(self, entity): - '''Return identity key for *entity*.''' + """Return identity key for *entity*.""" return str(ftrack_api.inspection.identity(entity)) def __copy__(self): - '''Return shallow copy. + """Return shallow copy. .. note:: To maintain expectations on usage, the shallow copy will include a shallow copy of the underlying data store. - ''' + """ cls = self.__class__ copied_instance = cls.__new__(cls) copied_instance.__dict__.update(self.__dict__) @@ -66,7 +66,7 @@ def __copy__(self): return copied_instance def _notify(self, old_value): - '''Notify about modification.''' + """Notify about modification.""" # Record operation. if self.entity.session.record_operations: self.entity.session.recorded_operations.push( @@ -75,19 +75,17 @@ def _notify(self, old_value): ftrack_api.inspection.primary_key(self.entity), self.attribute.name, old_value, - self + self, ) ) def insert(self, index, item): - '''Insert *item* at *index*.''' + """Insert *item* at *index*.""" if not self.mutable: raise ftrack_api.exception.ImmutableCollectionError(self) if item in self: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) + raise ftrack_api.exception.DuplicateItemInCollectionError(item, self) old_value = copy.copy(self) self._data.insert(index, item) @@ -95,15 +93,15 @@ def insert(self, index, item): self._notify(old_value) def __contains__(self, value): - '''Return whether *value* present in collection.''' + """Return whether *value* present in collection.""" return self._identity_key(value) in self._identities def __getitem__(self, index): - '''Return item at *index*.''' + """Return item at *index*.""" return self._data[index] def __setitem__(self, index, item): - '''Set *item* against *index*.''' + """Set *item* against *index*.""" if not self.mutable: raise ftrack_api.exception.ImmutableCollectionError(self) @@ -113,9 +111,7 @@ def __setitem__(self, index, item): pass else: if index != existing_index: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) + raise ftrack_api.exception.DuplicateItemInCollectionError(item, self) old_value = copy.copy(self) try: @@ -130,7 +126,7 @@ def __setitem__(self, index, item): self._notify(old_value) def __delitem__(self, index): - '''Remove item at *index*.''' + """Remove item at *index*.""" if not self.mutable: raise ftrack_api.exception.ImmutableCollectionError(self) @@ -141,41 +137,39 @@ def __delitem__(self, index): self._notify(old_value) def __len__(self): - '''Return count of items.''' + 
"""Return count of items.""" return len(self._data) def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' + """Return whether this collection is equal to *other*.""" if not isinstance(other, Collection): return False return sorted(self._identities) == sorted(other._identities) def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' + """Return whether this collection is not equal to *other*.""" return not self == other class MappedCollectionProxy(collections_abc.MutableMapping): - '''Common base class for mapped collection of entities.''' + """Common base class for mapped collection of entities.""" def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + """Initialise proxy for *collection*.""" + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) self.collection = collection super(MappedCollectionProxy, self).__init__() def __copy__(self): - '''Return shallow copy. + """Return shallow copy. .. note:: To maintain expectations on usage, the shallow copy will include a shallow copy of the underlying collection. - ''' + """ cls = self.__class__ copied_instance = cls.__new__(cls) copied_instance.__dict__.update(self.__dict__) @@ -185,27 +179,27 @@ def __copy__(self): @property def mutable(self): - '''Return whether collection is mutable.''' + """Return whether collection is mutable.""" return self.collection.mutable @mutable.setter def mutable(self, value): - '''Set whether collection is mutable to *value*.''' + """Set whether collection is mutable to *value*.""" self.collection.mutable = value @property def attribute(self): - '''Return attribute bound to.''' + """Return attribute bound to.""" return self.collection.attribute @attribute.setter def attribute(self, value): - '''Set bound attribute to *value*.''' + """Set bound attribute to *value*.""" self.collection.attribute = value class KeyValueMappedCollectionProxy(MappedCollectionProxy): - '''A mapped collection of key, value entities. + """A mapped collection of key, value entities. Proxy a standard :class:`Collection` as a mapping where certain attributes from the entities in the collection are mapped to key, value pairs. 
@@ -223,19 +217,17 @@ class KeyValueMappedCollectionProxy(MappedCollectionProxy): >>> print mapped.collection[-1] Metadata(key='bam', value='biz') - ''' + """ - def __init__( - self, collection, creator, key_attribute, value_attribute - ): - '''Initialise collection.''' + def __init__(self, collection, creator, key_attribute, value_attribute): + """Initialise collection.""" self.creator = creator self.key_attribute = key_attribute self.value_attribute = value_attribute super(KeyValueMappedCollectionProxy, self).__init__(collection) def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' + """Return entity instance with matching *key* from collection.""" for entity in self.collection: if entity[self.key_attribute] == key: return entity @@ -243,25 +235,19 @@ def _get_entity_by_key(self, key): raise KeyError(key) def __getitem__(self, key): - '''Return value for *key*.''' + """Return value for *key*.""" entity = self._get_entity_by_key(key) return entity[self.value_attribute] def __setitem__(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" try: entity = self._get_entity_by_key(key) except KeyError: - data = { - self.key_attribute: key, - self.value_attribute: value - } + data = {self.key_attribute: key, self.value_attribute: value} entity = self.creator(self, data) - if ( - ftrack_api.inspection.state(entity) is - ftrack_api.symbol.CREATED - ): + if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: # Persisting this entity will be handled here, record the # operation. self.collection.append(entity) @@ -278,13 +264,13 @@ def __setitem__(self, key, value): entity[self.value_attribute] = value def __delitem__(self, key): - '''Remove and delete *key*. + """Remove and delete *key*. .. note:: The associated entity will be deleted as well. - ''' + """ for index, entity in enumerate(self.collection): if entity[self.key_attribute] == key: break @@ -295,7 +281,7 @@ def __delitem__(self, key): entity.session.delete(entity) def __iter__(self): - '''Iterate over all keys.''' + """Iterate over all keys.""" keys = set() for entity in self.collection: keys.add(entity[self.key_attribute]) @@ -303,7 +289,7 @@ def __iter__(self): return iter(keys) def __len__(self): - '''Return count of keys.''' + """Return count of keys.""" keys = set() for entity in self.collection: keys.add(entity[self.key_attribute]) @@ -312,18 +298,16 @@ def __len__(self): def keys(self): # COMPAT for unit tests.. - return list(super( - KeyValueMappedCollectionProxy, self - ).keys()) + return list(super(KeyValueMappedCollectionProxy, self).keys()) class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for session.''' + """Generate key for session.""" def _key(self, obj): - '''Return key for *obj*.''' + """Return key for *obj*.""" if isinstance(obj, dict): - session = obj.get('session') + session = obj.get("session") if session is not None: # Key by session only. return str(id(session)) @@ -341,89 +325,81 @@ def _key(self, obj): @memoise_session def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. + """Return list of custom attribute configurations. The configuration objects will have key, project_id, id and object_type_id populated. 
- ''' + """ return session.query( - 'select key, project_id, id, object_type_id, entity_type from ' - 'CustomAttributeConfiguration' + "select key, project_id, id, object_type_id, entity_type from " + "CustomAttributeConfiguration" ).all() class CustomAttributeCollectionProxy(MappedCollectionProxy): - '''A mapped collection of custom attribute value entities.''' - - def __init__( - self, collection - ): - '''Initialise collection.''' - self.key_attribute = 'configuration_id' - self.value_attribute = 'value' + """A mapped collection of custom attribute value entities.""" + + def __init__(self, collection): + """Initialise collection.""" + self.key_attribute = "configuration_id" + self.value_attribute = "value" super(CustomAttributeCollectionProxy, self).__init__(collection) def _get_entity_configurations(self): - '''Return all configurations for current collection entity.''' + """Return all configurations for current collection entity.""" entity = self.collection.entity entity_type = None project_id = None object_type_id = None - if 'object_type_id' in list(entity.keys()): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] + if "object_type_id" in list(entity.keys()): + project_id = entity["project_id"] + entity_type = "task" + object_type_id = entity["object_type_id"] - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' + if entity.entity_type == "AssetVersion": + project_id = entity["asset"]["parent"]["project_id"] + entity_type = "assetversion" - if entity.entity_type == 'Asset': - project_id = entity['parent']['project_id'] - entity_type = 'asset' + if entity.entity_type == "Asset": + project_id = entity["parent"]["project_id"] + entity_type = "asset" - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' + if entity.entity_type == "Project": + project_id = entity["id"] + entity_type = "show" - if entity.entity_type == 'User': - entity_type = 'user' + if entity.entity_type == "User": + entity_type = "user" if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) + raise ValueError("Entity {!r} not supported.".format(entity)) configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): + for configuration in _get_custom_attribute_configurations(entity.session): if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id + configuration["entity_type"] == entity_type + and configuration["project_id"] in (project_id, None) + and configuration["object_type_id"] == object_type_id ): configurations.append(configuration) # Return with global configurations at the end of the list. This is done # so that global conigurations are shadowed by project specific if the # configurations list is looped when looking for a matching `key`. 
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) + return sorted(configurations, key=lambda item: item["project_id"] is None) def _get_keys(self): - '''Return a list of all keys.''' + """Return a list of all keys.""" keys = [] for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) + keys.append(configuration["key"]) return keys def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' + """Return entity instance with matching *key* from collection.""" configuration_id = self.get_configuration_id_from_key(key) for entity in self.collection: if entity[self.key_attribute] == configuration_id: @@ -432,32 +408,32 @@ def _get_entity_by_key(self, key): return None def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. + """Return id of configuration with matching *key*. Raise :exc:`KeyError` if no configuration with matching *key* found. - ''' + """ for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] + if key == configuration["key"]: + return configuration["id"] raise KeyError(key) def __getitem__(self, key): - '''Return value for *key*.''' + """Return value for *key*.""" entity = self._get_entity_by_key(key) if entity: return entity[self.value_attribute] for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] + if configuration["key"] == key: + return configuration["default"] raise KeyError(key) def __setitem__(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" custom_attribute_value = self._get_entity_by_key(key) if custom_attribute_value: @@ -468,23 +444,23 @@ def __setitem__(self, key, value): data = { self.key_attribute: self.get_configuration_id_from_key(key), self.value_attribute: value, - 'entity_id': entity['id'] + "entity_id": entity["id"], } # Make sure to use the currently active collection. This is # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) + self.collection.entity["custom_attributes"].collection.append( + session.create("CustomAttributeValue", data) ) def __delitem__(self, key): - '''Remove and delete *key*. + """Remove and delete *key*. .. note:: The associated entity will be deleted as well. 
- ''' + """ custom_attribute_value = self._get_entity_by_key(key) if custom_attribute_value: @@ -493,24 +469,27 @@ def __delitem__(self, key): custom_attribute_value.session.delete(custom_attribute_value) else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) + self.logger.warning( + L( + "Cannot delete {0!r} on {1!r}, no custom attribute value set.", + key, + self.collection.entity, + ) + ) def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' + """Return True if *collection* equals proxy collection.""" if collection is ftrack_api.symbol.NOT_SET: return False return collection.collection == self.collection def __iter__(self): - '''Iterate over all keys.''' + """Iterate over all keys.""" keys = self._get_keys() return iter(keys) def __len__(self): - '''Return count of keys.''' + """Return count of keys.""" keys = self._get_keys() return len(keys) diff --git a/source/ftrack_api/data.py b/source/ftrack_api/data.py index 108b2edf..2f9a3518 100644 --- a/source/ftrack_api/data.py +++ b/source/ftrack_api/data.py @@ -9,55 +9,55 @@ class Data(with_metaclass(ABCMeta, object)): - '''File-like object for manipulating data.''' + """File-like object for manipulating data.""" def __init__(self): - '''Initialise data access.''' + """Initialise data access.""" self.closed = False @abstractmethod def read(self, limit=None): - '''Return content from current position up to *limit*.''' + """Return content from current position up to *limit*.""" @abstractmethod def write(self, content): - '''Write content at current position.''' + """Write content at current position.""" def flush(self): - '''Flush buffers ensuring data written.''' + """Flush buffers ensuring data written.""" def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. + """Move internal pointer by *offset*. The *whence* argument is optional and defaults to os.SEEK_SET or 0 (absolute file positioning); other values are os.SEEK_CUR or 1 (seek relative to the current position) and os.SEEK_END or 2 (seek relative to the file's end). - ''' - raise NotImplementedError('Seek not supported.') + """ + raise NotImplementedError("Seek not supported.") def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') + """Return current position of internal pointer.""" + raise NotImplementedError("Tell not supported.") def close(self): - '''Flush buffers and prevent further access.''' + """Flush buffers and prevent further access.""" self.flush() self.closed = True class FileWrapper(Data): - '''Data wrapper for Python file objects.''' + """Data wrapper for Python file objects.""" def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' + """Initialise access to *wrapped_file*.""" self.wrapped_file = wrapped_file self._read_since_last_write = False super(FileWrapper, self).__init__() def read(self, limit=None): - '''Return content from current position up to *limit*.''' + """Return content from current position up to *limit*.""" self._read_since_last_write = True if limit is None: @@ -66,7 +66,7 @@ def read(self, limit=None): return self.wrapped_file.read(limit) def write(self, content): - '''Write content at current position.''' + """Write content at current position.""" if self._read_since_last_write: # Windows requires a seek before switching from read to write. 
self.seek(self.tell()) @@ -75,49 +75,47 @@ def write(self, content): self._read_since_last_write = False def flush(self): - '''Flush buffers ensuring data written.''' + """Flush buffers ensuring data written.""" super(FileWrapper, self).flush() - if hasattr(self.wrapped_file, 'flush'): + if hasattr(self.wrapped_file, "flush"): self.wrapped_file.flush() def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*.''' + """Move internal pointer by *offset*.""" self.wrapped_file.seek(offset, whence) def tell(self): - '''Return current position of internal pointer.''' + """Return current position of internal pointer.""" return self.wrapped_file.tell() def close(self): - '''Flush buffers and prevent further access.''' + """Flush buffers and prevent further access.""" if not self.closed: super(FileWrapper, self).close() - if hasattr(self.wrapped_file, 'close'): + if hasattr(self.wrapped_file, "close"): self.wrapped_file.close() class File(FileWrapper): - '''Data wrapper accepting filepath.''' + """Data wrapper accepting filepath.""" - def __init__(self, path, mode='rb'): - '''Open file at *path* with *mode*.''' + def __init__(self, path, mode="rb"): + """Open file at *path* with *mode*.""" file_object = open(path, mode) super(File, self).__init__(file_object) class String(FileWrapper): - '''Data wrapper using TemporaryFile instance.''' + """Data wrapper using TemporaryFile instance.""" def __init__(self, content=None): - '''Initialise data with *content*.''' + """Initialise data with *content*.""" # Track if data is binary or not. If it is binary then read should also # return binary. self.is_binary = True - super(String, self).__init__( - tempfile.TemporaryFile() - ) + super(String, self).__init__(tempfile.TemporaryFile()) if content is not None: if not isinstance(content, bytes): @@ -132,14 +130,12 @@ def write(self, content): self.is_binary = False content = content.encode() - super(String, self).write( - content - ) + super(String, self).write(content) def read(self, limit=None): content = super(String, self).read(limit) if not self.is_binary: - content = content.decode('utf-8') + content = content.decode("utf-8") return content diff --git a/source/ftrack_api/entity/__init__.py b/source/ftrack_api/entity/__init__.py index 1d452f28..1aab07ed 100644 --- a/source/ftrack_api/entity/__init__.py +++ b/source/ftrack_api/entity/__init__.py @@ -1,2 +1,2 @@ # :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file +# :copyright: Copyright (c) 2014 ftrack diff --git a/source/ftrack_api/entity/asset_version.py b/source/ftrack_api/entity/asset_version.py index 859d94e4..40a66dcd 100644 --- a/source/ftrack_api/entity/asset_version.py +++ b/source/ftrack_api/entity/asset_version.py @@ -5,12 +5,10 @@ class AssetVersion(ftrack_api.entity.base.Entity): - '''Represent asset version.''' + """Represent asset version.""" - def create_component( - self, path, data=None, location=None - ): - '''Create a new component from *path* with additional *data* + def create_component(self, path, data=None, location=None): + """Create a new component from *path* with additional *data* .. note:: @@ -37,17 +35,17 @@ def create_component( If *location* is specified then automatically add component to that location. 
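As a review aid, a hedged sketch of how this method is typically called; the file path is hypothetical, and a reachable server with an existing AssetVersion is assumed:

import ftrack_api

session = ftrack_api.Session()
version = session.query("AssetVersion").first()

# Create a component from a file; 'auto' resolves to the
# highest-priority accessible location.
component = version.create_component(
    "/tmp/renders/shot010_v001.mov",  # hypothetical path
    data={"name": "main"},
    location="auto",
)
session.commit()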
- ''' + """ if data is None: data = {} - data.pop('version_id', None) - data['version'] = self + data.pop("version_id", None) + data["version"] = self return self.session.create_component(path, data=data, location=location) - def encode_media(self, media, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. + def encode_media(self, media, keep_original="auto"): + """Return a new Job that encode *media* to make it playable in browsers. *media* can be a path to a file or a FileComponent in the ftrack.server location. @@ -70,7 +68,7 @@ def encode_media(self, media, keep_original='auto'): The output components are associated with the job via the job_components relation. - An image component will always be generated if possible, and will be + An image component will always be generated if possible, and will be set as the version's thumbnail. The new components will automatically be associated with the version. @@ -85,7 +83,7 @@ def encode_media(self, media, keep_original='auto'): If *keep_original* is not set, the original media will be kept if it is a FileComponent, and deleted if it is a file path. You can specify True or False to change this behavior. - ''' + """ return self.session.encode_media( - media, version_id=self['id'], keep_original=keep_original + media, version_id=self["id"], keep_original=keep_original ) diff --git a/source/ftrack_api/entity/base.py b/source/ftrack_api/entity/base.py index 861cec9c..cf8fa4b7 100644 --- a/source/ftrack_api/entity/base.py +++ b/source/ftrack_api/entity/base.py @@ -19,33 +19,39 @@ class _EntityBase(object): - '''Base class to allow for mixins, we need a common base.''' + """Base class to allow for mixins, we need a common base.""" + pass class DynamicEntityTypeMetaclass(abc.ABCMeta): - '''Custom metaclass to customise representation of dynamic classes. + """Custom metaclass to customise representation of dynamic classes. .. note:: Derive from same metaclass as derived bases to avoid conflicts. - ''' + """ + def __repr__(self): - '''Return representation of class.''' - return ''.format(self.__name__) + """Return representation of class.""" + return "".format(self.__name__) -class Entity(with_metaclass(DynamicEntityTypeMetaclass, _EntityBase, collections_abc.MutableMapping)): - '''Base class for all entities.''' +class Entity( + with_metaclass( + DynamicEntityTypeMetaclass, _EntityBase, collections_abc.MutableMapping + ) +): + """Base class for all entities.""" - entity_type = 'Entity' + entity_type = "Entity" attributes = None primary_key_attributes = None default_projections = None def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. + """Initialise entity. *session* is an instance of :class:`ftrack_api.session.Session` that this entity instance is bound to. @@ -57,30 +63,31 @@ def __init__(self, session, data=None, reconstructing=False): such as from a query, and therefore should not have any special creation logic applied, such as initialising defaults for missing data. - ''' + """ super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + self.logger = logging.getLogger(__name__ + "." 
+ self.__class__.__name__) self.session = session self._inflated = set() if data is None: data = {} - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) + self.logger.debug( + L( + "{0} entity from {1!r}.", + ("Reconstructing" if reconstructing else "Constructing"), + data, + ) + ) - self._ignore_data_keys = ['__entity_type__'] + self._ignore_data_keys = ["__entity_type__"] if not reconstructing: self._construct(data) else: self._reconstruct(data) def _construct(self, data): - '''Construct from *data*.''' + """Construct from *data*.""" # Suspend operation recording so that all modifications can be applied # in single create operation. In addition, recording a modification # operation requires a primary key which may not be available yet. @@ -97,7 +104,6 @@ def _construct(self, data): attribute.set_local_value(self, default_value) - # Data represents locally set values. for key, value in list(data.items()): if key in self._ignore_data_keys: @@ -105,16 +111,18 @@ def _construct(self, data): attribute = self.__class__.attributes.get(key) if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) + self.logger.debug( + L( + "Cannot populate {0!r} attribute as no such " + "attribute found on entity {1!r}.", + key, + self, + ) + ) continue if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) + relational_attributes.setdefault(attribute, value) else: attribute.set_local_value(self, value) @@ -146,7 +154,7 @@ def _construct(self, data): ftrack_api.operation.CreateEntityOperation( self.entity_type, ftrack_api.inspection.primary_key(self), - entity_data + entity_data, ) ) @@ -156,12 +164,10 @@ def _construct(self, data): # in the correct order as the newly created attributes might # contain references to the newly created entity. - attribute.set_local_value( - self, value - ) + attribute.set_local_value(self, value) def _reconstruct(self, data): - '''Reconstruct from *data*.''' + """Reconstruct from *data*.""" # Data represents remote values. 
for key, value in list(data.items()): if key in self._ignore_data_keys: @@ -169,56 +175,57 @@ def _reconstruct(self, data): continue attribute = self.__class__.attributes.get(key) if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such attribute ' - 'found on entity {1!r}.', key, self - )) + self.logger.debug( + L( + "Cannot populate {0!r} attribute as no such attribute " + "found on entity {1!r}.", + key, + self, + ) + ) continue attribute.set_remote_value(self, value) def __repr__(self): - '''Return representation of instance.''' - return '<dynamic ftrack {0} object {1}>'.format( + """Return representation of instance.""" + return "<dynamic ftrack {0} object {1}>".format( self.__class__.__name__, id(self) ) def __str__(self): - '''Return string representation of instance.''' + """Return string representation of instance.""" with self.session.auto_populating(False): - primary_key = ['Unknown'] + primary_key = ["Unknown"] try: primary_key = list(ftrack_api.inspection.primary_key(self).values()) except KeyError: pass - return '<{0}({1})>'.format( - self.__class__.__name__, ', '.join(primary_key) - ) + return "<{0}({1})>".format(self.__class__.__name__, ", ".join(primary_key)) def __hash__(self): - '''Return hash representing instance.''' + """Return hash representing instance.""" return hash(str(ftrack_api.inspection.identity(self))) def __eq__(self, other): - '''Return whether *other* is equal to this instance. + """Return whether *other* is equal to this instance. .. note:: Equality is determined by both instances having the same identity. Values of attributes are not considered. - ''' + """ try: - return ( - ftrack_api.inspection.identity(other) - == ftrack_api.inspection.identity(self) - ) + return ftrack_api.inspection.identity( + other + ) == ftrack_api.inspection.identity(self) except (AttributeError, KeyError): return False def __getitem__(self, key): - '''Return attribute value for *key*.''' + """Return attribute value for *key*.""" attribute = self.__class__.attributes.get(key) if attribute is None: raise KeyError(key) @@ -226,7 +233,7 @@ def __getitem__(self, key): return attribute.get_value(self) def __setitem__(self, key, value): - '''Set attribute *value* for *key*.''' + """Set attribute *value* for *key*.""" attribute = self.__class__.attributes.get(key) if attribute is None: raise KeyError(key) @@ -234,54 +241,54 @@ def __setitem__(self, key, value): attribute.set_local_value(self, value) def __delitem__(self, key): - '''Clear attribute value for *key*. + """Clear attribute value for *key*. .. note:: Will not remove the attribute, but instead clear any local value and revert to the last known server value. - ''' + """ attribute = self.__class__.attributes.get(key) attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) def __iter__(self): - '''Iterate over all attributes keys.''' + """Iterate over all attribute keys.""" for attribute in self.__class__.attributes: yield attribute.name def __len__(self): - '''Return count of attributes.''' + """Return count of attributes.""" return len(self.__class__.attributes) def values(self): - '''Return list of values.''' + """Return list of values.""" if self.session.auto_populate: self._populate_unset_scalar_attributes() return list(super(Entity, self).values()) def items(self): - '''Return list of tuples of (key, value) pairs. + """Return list of tuples of (key, value) pairs. .. note:: Will fetch all values from the server if not already fetched or set locally.
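Because Entity is a MutableMapping, the methods in this hunk give every entity dictionary semantics. A brief sketch, assuming a connected session and any existing Project:

import ftrack_api

session = ftrack_api.Session()
project = session.query("Project").first()

# Attribute access is keyed by name; reading an unset scalar triggers a
# server fetch when auto_populate is enabled on the session.
print(project["name"])
print(len(project))  # number of schema attributes
for key in project:  # iterates attribute names
    pass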
- ''' + """ if self.session.auto_populate: self._populate_unset_scalar_attributes() return list(super(Entity, self).items()) def clear(self): - '''Reset all locally modified attribute values.''' + """Reset all locally modified attribute values.""" for attribute in self: del self[attribute] def merge(self, entity, merged=None): - '''Merge *entity* attribute values and other data into this entity. + """Merge *entity* attribute values and other data into this entity. Only merge values from *entity* that are not :attr:`ftrack_api.symbol.NOT_SET`. @@ -294,7 +301,7 @@ def merge(self, entity, merged=None): * old_value - The previous value. * new_value - The new merged value. - ''' + """ log_debug = self.logger.isEnabledFor(logging.DEBUG) if merged is None: @@ -329,15 +336,15 @@ def merge(self, entity, merged=None): ) attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) + changes.append( + { + "type": "local_attribute", + "name": attribute.name, + "old_value": local_value, + "new_value": merged_local_value, + } ) + log_debug and self.logger.debug(log_message.format(**changes[-1])) # Remote attributes. other_remote_value = other_attribute.get_remote_value(entity) @@ -348,21 +355,19 @@ def merge(self, entity, merged=None): other_remote_value, merged=merged ) - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) + attribute.set_remote_value(self, merged_remote_value) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) + changes.append( + { + "type": "remote_attribute", + "name": attribute.name, + "old_value": remote_value, + "new_value": merged_remote_value, + } ) + log_debug and self.logger.debug(log_message.format(**changes[-1])) + # We need to handle collections separately since # they may store a local copy of the remote attribute # even though it may not be modified. @@ -371,24 +376,22 @@ def merge(self, entity, merged=None): ): continue - local_value = attribute.get_local_value( - self - ) + local_value = attribute.get_local_value(self) # Populated but not modified, update it. 
if ( - local_value is not ftrack_api.symbol.NOT_SET and - local_value == remote_value + local_value is not ftrack_api.symbol.NOT_SET + and local_value == remote_value ): - attribute.set_local_value( - self, merged_remote_value + attribute.set_local_value(self, merged_remote_value) + changes.append( + { + "type": "local_attribute", + "name": attribute.name, + "old_value": local_value, + "new_value": merged_remote_value, + } ) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_remote_value - }) log_debug and self.logger.debug( log_message.format(**changes[-1]) @@ -397,7 +400,7 @@ def merge(self, entity, merged=None): return changes def _populate_unset_scalar_attributes(self): - '''Populate all unset scalar attributes in one query.''' + """Populate all unset scalar attributes in one query.""" projections = [] for attribute in self.attributes: if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): @@ -405,4 +408,4 @@ def _populate_unset_scalar_attributes(self): projections.append(attribute.name) if projections: - self.session.populate([self], ', '.join(projections)) + self.session.populate([self], ", ".join(projections)) diff --git a/source/ftrack_api/entity/component.py b/source/ftrack_api/entity/component.py index c08ff791..e36e6f06 100644 --- a/source/ftrack_api/entity/component.py +++ b/source/ftrack_api/entity/component.py @@ -6,29 +6,27 @@ class Component(ftrack_api.entity.base.Entity): - '''Represent a component.''' + """Represent a component.""" def get_availability(self, locations=None): - '''Return availability in *locations*. + """Return availability in *locations*. If *locations* is None, all known locations will be checked. Return a dictionary of {location_id:percentage_availability} - ''' - return self.session.get_component_availability( - self, locations=locations - ) + """ + return self.session.get_component_availability(self, locations=locations) class CreateThumbnailMixin(object): - '''Mixin to add create_thumbnail method on entity class.''' + """Mixin to add create_thumbnail method on entity class.""" def create_thumbnail(self, path, data=None): - '''Set entity thumbnail from *path*. - Creates a thumbnail component using in the ftrack.server location - :meth:`Session.create_component + Creates a thumbnail component in the ftrack.server location using + :meth:`Session.create_component <ftrack_api.session.Session.create_component>` The thumbnail component will be created using *data* if specified. If no component name is given, `thumbnail` will be used. @@ -41,35 +39,33 @@ def create_thumbnail(self, path, data=None): A :meth:`Session.commit` will be automatically issued.
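A short usage sketch for review context, assuming a connected session and an existing Task (the image path is hypothetical):

import ftrack_api

session = ftrack_api.Session()
task = session.query("Task").first()

# Uploads the image to the ftrack.server location and points the
# entity's thumbnail_id at the new component; a commit is issued
# internally, as the docstring above notes.
thumbnail_component = task.create_thumbnail("/tmp/thumbnail.jpg")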
- ''' + """ if data is None: data = {} - if not data.get('name'): - data['name'] = 'thumbnail' + if not data.get("name"): + data["name"] = "thumbnail" - thumbnail_component = self.session.create_component( - path, data, location=None - ) + thumbnail_component = self.session.create_component(path, data, location=None) origin_location = self.session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + "Location", ftrack_api.symbol.ORIGIN_LOCATION_ID ) server_location = self.session.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID + "Location", ftrack_api.symbol.SERVER_LOCATION_ID ) server_location.add_component(thumbnail_component, [origin_location]) - # TODO: This commit can be avoided by reordering the operations in + # TODO: This commit can be avoided by reordering the operations in # this method so that the component is transferred to ftrack.server # after the thumbnail has been set. - # + # # There is currently a bug in the API backend, causing the operations # to *some* times be ordered wrongly, where the update occurs before # the component has been created, causing an integrity error. - # + # # Once this issue has been resolved, this commit can be removed and - # and the update placed between component creation and registration. - self['thumbnail_id'] = thumbnail_component['id'] + # and the update placed between component creation and registration. + self["thumbnail_id"] = thumbnail_component["id"] self.session.commit() return thumbnail_component diff --git a/source/ftrack_api/entity/factory.py b/source/ftrack_api/entity/factory.py index ba1f086f..cda4f0d5 100644 --- a/source/ftrack_api/entity/factory.py +++ b/source/ftrack_api/entity/factory.py @@ -26,23 +26,21 @@ class Factory(object): - '''Entity class factory.''' + """Entity class factory.""" def __init__(self): - '''Initialise factory.''' + """Initialise factory.""" super(Factory, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) def create(self, schema, bases=None): - '''Create and return entity class from *schema*. + """Create and return entity class from *schema*. *bases* should be a list of bases to give the constructed class. If not specified, default to :class:`ftrack_api.entity.base.Entity`. - ''' - entity_type = schema['id'] + """ + entity_type = schema["id"] class_name = entity_type class_bases = bases @@ -53,32 +51,35 @@ def create(self, schema, bases=None): # Build attributes for class. 
attributes = ftrack_api.attribute.Attributes() - immutable_properties = schema.get('immutable', []) - computed_properties = schema.get('computed', []) - for name, fragment in list(schema.get('properties', {}).items()): + immutable_properties = schema.get("immutable", []) + computed_properties = schema.get("computed", []) + for name, fragment in list(schema.get("properties", {}).items()): mutable = name not in immutable_properties computed = name in computed_properties - default = fragment.get('default', ftrack_api.symbol.NOT_SET) - if default == '{uid}': + default = fragment.get("default", ftrack_api.symbol.NOT_SET) + if default == "{uid}": default = lambda instance: str(uuid.uuid4()) - data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) + data_type = fragment.get("type", ftrack_api.symbol.NOT_SET) if data_type is not ftrack_api.symbol.NOT_SET: - if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable', - 'object' + "string", + "boolean", + "integer", + "number", + "variable", + "object", ): # Basic scalar attribute. - if data_type == 'number': - data_type = 'float' + if data_type == "number": + data_type = "float" - if data_type == 'string': - data_format = fragment.get('format') - if data_format == 'date-time': - data_type = 'datetime' + if data_type == "string": + data_format = fragment.get("format") + if data_format == "date-time": + data_type = "datetime" attribute = self.create_scalar_attribute( class_name, name, mutable, computed, default, data_type @@ -86,20 +87,24 @@ def create(self, schema, bases=None): if attribute: attributes.add(attribute) - elif data_type == 'array': + elif data_type == "array": attribute = self.create_collection_attribute( class_name, name, mutable ) if attribute: attributes.add(attribute) - elif data_type == 'mapped_array': - reference = fragment.get('items', {}).get('$ref') + elif data_type == "mapped_array": + reference = fragment.get("items", {}).get("$ref") if not reference: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) + self.logger.debug( + L( + "Skipping {0}.{1} mapped_array attribute that does " + "not define a schema reference.", + class_name, + name, + ) + ) continue attribute = self.create_mapped_collection_attribute( @@ -109,18 +114,27 @@ def create(self, schema, bases=None): attributes.add(attribute) else: - self.logger.debug(L( - 'Skipping {0}.{1} attribute with unrecognised data ' - 'type {2}', class_name, name, data_type - )) + self.logger.debug( + L( + "Skipping {0}.{1} attribute with unrecognised data " + "type {2}", + class_name, + name, + data_type, + ) + ) else: # Reference attribute. - reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) + reference = fragment.get("$ref", ftrack_api.symbol.NOT_SET) if reference is ftrack_api.symbol.NOT_SET: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) + self.logger.debug( + L( + "Skipping {0}.{1} mapped_array attribute that does " + "not define a schema reference.", + class_name, + name, + ) + ) continue attribute = self.create_reference_attribute( @@ -129,22 +143,20 @@ def create(self, schema, bases=None): if attribute: attributes.add(attribute) - default_projections = schema.get('default_projections', []) + default_projections = schema.get("default_projections", []) # Construct class. 
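# Reviewer note (not part of the diff): the construction that follows
# uses the three-argument built-in type(), which builds a class at
# runtime exactly as an equivalent class statement would.
Example = type(str("Example"), (object,), {"entity_type": "Example"})
assert Example.entity_type == "Example"
assert issubclass(Example, object)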
- class_namespace['entity_type'] = entity_type - class_namespace['attributes'] = attributes - class_namespace['primary_key_attributes'] = schema['primary_key'][:] - class_namespace['default_projections'] = default_projections + class_namespace["entity_type"] = entity_type + class_namespace["attributes"] = attributes + class_namespace["primary_key_attributes"] = schema["primary_key"][:] + class_namespace["default_projections"] = default_projections - from future.utils import ( - native_str - ) + from future.utils import native_str cls = type( native_str(class_name), # type doesn't accept unicode. tuple(class_bases), - class_namespace + class_namespace, ) return cls @@ -152,42 +164,43 @@ def create(self, schema, bases=None): def create_scalar_attribute( self, class_name, name, mutable, computed, default, data_type ): - '''Return appropriate scalar attribute instance.''' + """Return appropriate scalar attribute instance.""" return ftrack_api.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable, - computed=computed + name, + data_type=data_type, + default_value=default, + mutable=mutable, + computed=computed, ) def create_reference_attribute(self, class_name, name, mutable, reference): - '''Return appropriate reference attribute instance.''' - return ftrack_api.attribute.ReferenceAttribute( - name, reference, mutable=mutable - ) + """Return appropriate reference attribute instance.""" + return ftrack_api.attribute.ReferenceAttribute(name, reference, mutable=mutable) def create_collection_attribute(self, class_name, name, mutable): - '''Return appropriate collection attribute instance.''' - return ftrack_api.attribute.CollectionAttribute( - name, mutable=mutable + """Return appropriate collection attribute instance.""" + return ftrack_api.attribute.CollectionAttribute(name, mutable=mutable) + + def create_mapped_collection_attribute(self, class_name, name, mutable, reference): + """Return appropriate mapped collection attribute instance.""" + self.logger.debug( + L( + "Skipping {0}.{1} mapped_array attribute that has " + "no implementation defined for reference {2}.", + class_name, + name, + reference, + ) ) - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has ' - 'no implementation defined for reference {2}.', - class_name, name, reference - )) - class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for defaults.''' + """Generate key for defaults.""" def _key(self, obj): - '''Return key for *obj*.''' + """Return key for *obj*.""" if isinstance(obj, dict): - entity = obj.get('entity') + entity = obj.get("entity") if entity is not None: # Key by session only. return str(id(entity.session)) @@ -213,66 +226,62 @@ def _key(self, obj): @memoise_session def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. + """Return list of custom attribute configurations. The configuration objects will have key, project_id, id and object_type_id populated. 
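For review context, memoise_session caches this query per Session so the configuration listing is only fetched once per session. A rough, self-contained sketch of that pattern, not the library's actual cache classes:

import functools

_cache = {}

def memoise_per_session(function):
    """Cache *function* results keyed on the identity of *session*."""
    @functools.wraps(function)
    def wrapper(session):
        key = (function.__name__, id(session))
        if key not in _cache:
            _cache[key] = function(session)
        return _cache[key]
    return wrapper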
- ''' + """ return session.query( - 'select key, project_id, id, object_type_id, entity_type, ' - 'is_hierarchical from CustomAttributeConfiguration' + "select key, project_id, id, object_type_id, entity_type, " + "is_hierarchical from CustomAttributeConfiguration" ).all() def _get_entity_configurations(entity): - '''Return all configurations for current collection entity.''' + """Return all configurations for current collection entity.""" entity_type = None project_id = None object_type_id = None - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] + if "object_type_id" in entity.keys(): + project_id = entity["project_id"] + entity_type = "task" + object_type_id = entity["object_type_id"] - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' + if entity.entity_type == "AssetVersion": + project_id = entity["asset"]["parent"]["project_id"] + entity_type = "assetversion" - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' + if entity.entity_type == "Project": + project_id = entity["id"] + entity_type = "show" - if entity.entity_type == 'User': - entity_type = 'user' + if entity.entity_type == "User": + entity_type = "user" - if entity.entity_type == 'Asset': - entity_type = 'asset' + if entity.entity_type == "Asset": + entity_type = "asset" - if entity.entity_type in ('TypedContextList', 'AssetVersionList'): - entity_type = 'list' + if entity.entity_type in ("TypedContextList", "AssetVersionList"): + entity_type = "list" if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) + raise ValueError("Entity {!r} not supported.".format(entity)) configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): + for configuration in _get_custom_attribute_configurations(entity.session): if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id + configuration["entity_type"] == entity_type + and configuration["project_id"] in (project_id, None) + and configuration["object_type_id"] == object_type_id ): # The custom attribute configuration is for the target entity type. configurations.append(configuration) elif ( - entity_type in ('asset', 'assetversion', 'show', 'task') and - configuration['project_id'] in (project_id, None) and - configuration['is_hierarchical'] + entity_type in ("asset", "assetversion", "show", "task") + and configuration["project_id"] in (project_id, None) + and configuration["is_hierarchical"] ): # The target entity type allows hierarchical attributes. configurations.append(configuration) @@ -280,40 +289,38 @@ def _get_entity_configurations(entity): # Return with global configurations at the end of the list. This is done # so that global conigurations are shadowed by project specific if the # configurations list is looped when looking for a matching `key`. 
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) + return sorted(configurations, key=lambda item: item["project_id"] is None) class StandardFactory(Factory): - '''Standard entity class factory.''' + """Standard entity class factory.""" def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' + """Create and return entity class from *schema*.""" if not bases: bases = [] extra_bases = [] # Customise classes. - if schema['id'] == 'ProjectSchema': + if schema["id"] == "ProjectSchema": extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] - elif schema['id'] == 'Location': + elif schema["id"] == "Location": extra_bases = [ftrack_api.entity.location.Location] - elif schema['id'] == 'AssetVersion': + elif schema["id"] == "AssetVersion": extra_bases = [ftrack_api.entity.asset_version.AssetVersion] - elif schema['id'].endswith('Component'): + elif schema["id"].endswith("Component"): extra_bases = [ftrack_api.entity.component.Component] - elif schema['id'] == 'Note': + elif schema["id"] == "Note": extra_bases = [ftrack_api.entity.note.Note] - elif schema['id'] == 'Job': + elif schema["id"] == "Job": extra_bases = [ftrack_api.entity.job.Job] - elif schema['id'] == 'User': + elif schema["id"] == "User": extra_bases = [ftrack_api.entity.user.User] bases = extra_bases + bases @@ -323,56 +330,46 @@ def create(self, schema, bases=None): bases = [ftrack_api.entity.base.Entity] # Add mixins. - if 'notes' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.note.CreateNoteMixin - ) + if "notes" in schema.get("properties", {}): + bases.append(ftrack_api.entity.note.CreateNoteMixin) - if 'thumbnail_id' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.component.CreateThumbnailMixin - ) + if "thumbnail_id" in schema.get("properties", {}): + bases.append(ftrack_api.entity.component.CreateThumbnailMixin) cls = super(StandardFactory, self).create(schema, bases=bases) return cls - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - if reference == 'Metadata': + def create_mapped_collection_attribute(self, class_name, name, mutable, reference): + """Return appropriate mapped collection attribute instance.""" + if reference == "Metadata": def create_metadata(proxy, data, reference): - '''Return metadata for *data*.''' + """Return metadata for *data*.""" entity = proxy.collection.entity session = entity.session - data.update({ - 'parent_id': entity['id'], - 'parent_type': entity.entity_type - }) + data.update( + {"parent_id": entity["id"], "parent_type": entity.entity_type} + ) return session.create(reference, data) - creator = functools.partial( - create_metadata, reference=reference - ) - key_attribute = 'key' - value_attribute = 'value' + creator = functools.partial(create_metadata, reference=reference) + key_attribute = "key" + value_attribute = "value" return ftrack_api.attribute.KeyValueMappedCollectionAttribute( name, creator, key_attribute, value_attribute, mutable=mutable ) - elif reference == 'CustomAttributeValue': - return ( - ftrack_api.attribute.CustomAttributeCollectionAttribute( - name, mutable=mutable - ) + elif reference == "CustomAttributeValue": + return ftrack_api.attribute.CustomAttributeCollectionAttribute( + name, mutable=mutable ) - elif reference.endswith('CustomAttributeValue'): + elif reference.endswith("CustomAttributeValue"): + def creator(proxy, data): - '''Create a custom 
attribute based on *proxy* and *data*. + """Create a custom attribute based on *proxy* and *data*. Raise :py:exc:`KeyError` if related entity is already persisted to the server. The proxy represents dense custom attribute @@ -385,32 +382,30 @@ def creator(proxy, data): the proxy. Instead a CustomAttributeValue will be reconstructed and an update operation will be recorded. - ''' + """ entity = proxy.collection.entity - if ( - ftrack_api.inspection.state(entity) is not - ftrack_api.symbol.CREATED - ): + if ftrack_api.inspection.state(entity) is not ftrack_api.symbol.CREATED: raise KeyError( - 'Custom attributes must be created explicitly for the ' - 'given entity type before being set.' + "Custom attributes must be created explicitly for the " + "given entity type before being set." ) configuration = None for candidate in _get_entity_configurations(entity): - if candidate['key'] == data['key']: + if candidate["key"] == data["key"]: configuration = candidate break if configuration is None: raise ValueError( - u'No valid custom attribute for data {0!r} was found.' - .format(data) + "No valid custom attribute for data {0!r} was found.".format( + data + ) ) create_data = dict(list(data.items())) - create_data['configuration_id'] = configuration['id'] - create_data['entity_id'] = entity['id'] + create_data["configuration_id"] = configuration["id"] + create_data["entity_id"] = entity["id"] session = entity.session @@ -418,26 +413,27 @@ def creator(proxy, data): # value. This will prevent a create operation from being sent to the # remote, as create operations for this entity type are not # allowed. Instead an update operation will be recorded. - value = create_data.pop('value') - item = session.create( - reference, - create_data, - reconstructing=True - ) + value = create_data.pop("value") + item = session.create(reference, create_data, reconstructing=True) # Record update operation. - item['value'] = value + item["value"] = value return item - key_attribute = 'key' - value_attribute = 'value' + key_attribute = "key" + value_attribute = "value" return ftrack_api.attribute.KeyValueMappedCollectionAttribute( name, creator, key_attribute, value_attribute, mutable=mutable ) - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) + self.logger.debug( + L( + "Skipping {0}.{1} mapped_array attribute that has no configuration " + "for reference {2}.", + class_name, + name, + reference, + ) + ) diff --git a/source/ftrack_api/entity/job.py b/source/ftrack_api/entity/job.py index ae37922c..38a6cca7 100644 --- a/source/ftrack_api/entity/job.py +++ b/source/ftrack_api/entity/job.py @@ -5,10 +5,10 @@ class Job(ftrack_api.entity.base.Entity): - '''Represent job.''' + """Represent job.""" def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. + """Initialise entity. *session* is an instance of :class:`ftrack_api.session.Session` that this entity instance is bound to. @@ -33,16 +33,12 @@ def __init__(self, session, data=None, reconstructing=False): such as from a query, and therefore should not have any special creation logic applied, such as initialising defaults for missing data. - ''' + """ if not reconstructing: - if data.get('type') not in ('api_job', None): + if data.get("type") not in ("api_job", None): raise ValueError( - 'Invalid job type "{0}". Must be "api_job"'.format( - data.get('type') - ) + 'Invalid job type "{0}".
Must be "api_job"'.format(data.get("type")) ) - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) + super(Job, self).__init__(session, data=data, reconstructing=reconstructing) diff --git a/source/ftrack_api/entity/location.py b/source/ftrack_api/entity/location.py index df09ef25..418e345b 100644 --- a/source/ftrack_api/entity/location.py +++ b/source/ftrack_api/entity/location.py @@ -19,17 +19,17 @@ MixinBaseClass = with_metaclass( - ftrack_api.entity.base.DynamicEntityTypeMetaclass, - ftrack_api.entity.base._EntityBase, - collections_abc.MutableMapping + ftrack_api.entity.base.DynamicEntityTypeMetaclass, + ftrack_api.entity.base._EntityBase, + collections_abc.MutableMapping, ) class Location(ftrack_api.entity.base.Entity): - '''Represent storage for components.''' + """Represent storage for components.""" def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. + """Initialise entity. *session* is an instance of :class:`ftrack_api.session.Session` that this entity instance is bound to. @@ -41,7 +41,7 @@ def __init__(self, session, data=None, reconstructing=False): such as from a query, and therefore should not have any special creation logic applied, such as initialising defaults for missing data. - ''' + """ self.accessor = ftrack_api.symbol.NOT_SET self.structure = ftrack_api.symbol.NOT_SET self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET @@ -51,20 +51,18 @@ def __init__(self, session, data=None, reconstructing=False): ) def __str__(self): - '''Return string representation of instance.''' + """Return string representation of instance.""" representation = super(Location, self).__str__() with self.session.auto_populating(False): - name = self['name'] + name = self["name"] if name is not ftrack_api.symbol.NOT_SET: - representation = representation.replace( - '(', '("{0}", '.format(name) - ) + representation = representation.replace("(", '("{0}", '.format(name)) return representation def add_component(self, component, source, recursive=True): - '''Add *component* to location. + """Add *component* to location. *component* should be a single component instance. @@ -86,13 +84,11 @@ def add_component(self, component, source, recursive=True): A :meth:`Session.commit` may be automatically issued as part of the component registration. - ''' - return self.add_components( - [component], sources=source, recursive=recursive - ) + """ + return self.add_components([component], sources=source, recursive=recursive) def add_components(self, components, sources, recursive=True, _depth=0): - '''Add *components* to location. + """Add *components* to location. *components* should be a list of component instances. @@ -132,24 +128,23 @@ def add_components(self, components, sources, recursive=True, _depth=0): :exc:`ftrack_api.exception.LocationError` will be raised detailing issues and any transferred data under the 'transferred' detail key. - ''' - if ( - isinstance(sources, string_types) - or not isinstance(sources, collections_abc.Sequence) + """ + if isinstance(sources, string_types) or not isinstance( + sources, collections_abc.Sequence ): sources = [sources] sources_count = len(sources) if sources_count not in (1, len(components)): raise ValueError( - 'sources must be either a single source or a sequence of ' - 'sources with indexes corresponding to passed components.' + "sources must be either a single source or a sequence of " + "sources with indexes corresponding to passed components." 
) if not self.structure: raise ftrack_api.exception.LocationError( - 'No structure defined for location {location}.', - details=dict(location=self) + "No structure defined for location {location}.", + details=dict(location=self), ) if not components: @@ -157,7 +152,7 @@ def add_components(self, components, sources, recursive=True, _depth=0): # when called recursively on an empty sequence component. return - indent = ' ' * (_depth + 1) + indent = " " * (_depth + 1) # Check that components not already added to location. existing_components = [] @@ -166,11 +161,11 @@ def add_components(self, components, sources, recursive=True, _depth=0): except ftrack_api.exception.ComponentNotInLocationError as error: missing_component_ids = [ - missing_component['id'] - for missing_component in error.details['components'] + missing_component["id"] + for missing_component in error.details["components"] ] for component in components: - if component['id'] not in missing_component_ids: + if component["id"] not in missing_component_ids: existing_components.append(component) else: @@ -194,11 +189,13 @@ def add_components(self, components, sources, recursive=True, _depth=0): source = sources[index] # Add members first for container components. - is_container = 'members' in list(component.keys()) + is_container = "members" in list(component.keys()) if is_container and recursive: self.add_components( - component['members'], source, recursive=recursive, - _depth=(_depth + 1) + component["members"], + source, + recursive=recursive, + _depth=(_depth + 1), ) # Add component to this location. @@ -212,17 +209,17 @@ def add_components(self, components, sources, recursive=True, _depth=0): except Exception as error: raise ftrack_api.exception.LocationError( - 'Failed to transfer component {component} data to location ' - '{location} due to error:\n{indent}{error}\n{indent}' - 'Transferred component data that may require cleanup: ' - '{transferred}', + "Failed to transfer component {component} data to location " + "{location} due to error:\n{indent}{error}\n{indent}" + "Transferred component data that may require cleanup: " + "{transferred}", details=dict( indent=indent, component=component, location=self, error=error, - transferred=transferred - ) + transferred=transferred, + ), ) else: @@ -236,11 +233,8 @@ def add_components(self, components, sources, recursive=True, _depth=0): for component, resource_identifier in transferred: if self.resource_identifier_transformer: # Optionally encode resource identifier before storing. - resource_identifier = ( - self.resource_identifier_transformer.encode( - resource_identifier, - context={'component': component} - ) + resource_identifier = self.resource_identifier_transformer.encode( + resource_identifier, context={"component": component} ) components_to_register.append(component) @@ -253,92 +247,86 @@ def add_components(self, components, sources, recursive=True, _depth=0): except Exception as error: raise ftrack_api.exception.LocationError( - 'Failed to register components with location {location} due to ' - 'error:\n{indent}{error}\n{indent}Transferred component data ' - 'that may require cleanup: {transferred}', + "Failed to register components with location {location} due to " + "error:\n{indent}{error}\n{indent}Transferred component data " + "that may require cleanup: {transferred}", details=dict( - indent=indent, - location=self, - error=error, - transferred=transferred - ) + indent=indent, location=self, error=error, transferred=transferred + ), ) # Publish events. 
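# Reviewer note (not part of the diff): other code can react to the
# notification published below. A hedged subscriber sketch, assuming an
# already constructed session whose event hub is connected:
def on_component_added(event):
    print(event["data"]["component_id"], event["data"]["location_id"])

session.event_hub.subscribe(
    "topic={0}".format(ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC),
    on_component_added,
)
# session.event_hub.wait() would then block and dispatch incoming events.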
for component in components_to_register: - - component_id = list(ftrack_api.inspection.primary_key( - component - ).values())[0] + component_id = list(ftrack_api.inspection.primary_key(component).values())[ + 0 + ] location_id = list(ftrack_api.inspection.primary_key(self).values())[0] self.session.event_hub.publish( ftrack_api.event.base.Event( topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ), + data=dict(component_id=component_id, location_id=location_id), ), - on_error='ignore' + on_error="ignore", ) def _get_context(self, component, source): - '''Return context for *component* and *source*.''' + """Return context for *component* and *source*.""" context = {} if source: try: - source_resource_identifier = source.get_resource_identifier( - component - ) + source_resource_identifier = source.get_resource_identifier(component) except ftrack_api.exception.ComponentNotInLocationError: pass else: - context.update(dict( - source_resource_identifier=source_resource_identifier - )) + context.update( + dict(source_resource_identifier=source_resource_identifier) + ) return context def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. + """Manage transfer of *component* data from *source*. *resource_identifier* specifies the identifier to use with this locations accessor. - ''' - self.logger.debug(L( - 'Adding data for component {0!r} from source {1!r} to location ' - '{2!r} using resource identifier {3!r}.', - component, resource_identifier, source, self - )) + """ + self.logger.debug( + L( + "Adding data for component {0!r} from source {1!r} to location " + "{2!r} using resource identifier {3!r}.", + component, + resource_identifier, + source, + self, + ) + ) # Read data from source and write to this location. if not source.accessor: raise ftrack_api.exception.LocationError( - 'No accessor defined for source location {location}.', - details=dict(location=source) + "No accessor defined for source location {location}.", + details=dict(location=source), ) if not self.accessor: raise ftrack_api.exception.LocationError( - 'No accessor defined for target location {location}.', - details=dict(location=self) + "No accessor defined for target location {location}.", + details=dict(location=self), ) - is_container = 'members' in list(component.keys()) + is_container = "members" in list(component.keys()) if is_container: # TODO: Improve this check. Possibly introduce an inspection # such as ftrack_api.inspection.is_sequence_component. - if component.entity_type != 'SequenceComponent': + if component.entity_type != "SequenceComponent": self.accessor.make_container(resource_identifier) else: # Try to make container of component. try: - container = self.accessor.get_container( - resource_identifier - ) + container = self.accessor.get_container(resource_identifier) except ftrack_api.exception.AccessorParentResourceNotFoundError: # Container could not be retrieved from @@ -359,86 +347,78 @@ def _add_data(self, component, resource_identifier, source): # good cross platform, cross accessor solution for this # at present. raise ftrack_api.exception.LocationError( - 'Cannot add component as data already exists and ' - 'overwriting could result in data loss. Computed ' - 'target resource identifier was: {0}' - .format(resource_identifier) + "Cannot add component as data already exists and " + "overwriting could result in data loss. 
Computed " + "target resource identifier was: {0}".format(resource_identifier) ) # Read and write data. source_data = source.accessor.open( - source.get_resource_identifier(component), 'rb' + source.get_resource_identifier(component), "rb" ) - target_data = self.accessor.open(resource_identifier, 'wb') + target_data = self.accessor.open(resource_identifier, "wb") # Read/write data in chunks to avoid reading all into memory at the # same time. chunked_read = functools.partial( source_data.read, ftrack_api.symbol.CHUNK_SIZE ) - for chunk in iter(chunked_read, b''): + for chunk in iter(chunked_read, b""): target_data.write(chunk) target_data.close() source_data.close() def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location against *resource_identifier*.''' - return self._register_components_in_location( - [component], [resource_identifier] - ) + """Register *component* in location against *resource_identifier*.""" + return self._register_components_in_location([component], [resource_identifier]) - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. + def _register_components_in_location(self, components, resource_identifiers): + """Register *components* in location against *resource_identifiers*. Indices of *components* and *resource_identifiers* should align. - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): + """ + for component, resource_identifier in zip(components, resource_identifiers): self.session.create( - 'ComponentLocation', data=dict( + "ComponentLocation", + data=dict( component=component, location=self, - resource_identifier=resource_identifier - ) + resource_identifier=resource_identifier, + ), ) self.session.commit() def remove_component(self, component, recursive=True): - '''Remove *component* from location. + """Remove *component* from location. .. note:: A :meth:`Session.commit` may be automatically issued as part of the component deregistration. - ''' + """ return self.remove_components([component], recursive=recursive) def remove_components(self, components, recursive=True): - '''Remove *components* from location. + """Remove *components* from location. .. note:: A :meth:`Session.commit` may be automatically issued as part of the components deregistration. - ''' + """ for component in components: # Check component is in this location self.get_resource_identifier(component) # Remove members first for container components. - is_container = 'members' in list(component.keys()) + is_container = "members" in list(component.keys()) if is_container and recursive: - self.remove_components( - component['members'], recursive=recursive - ) + self.remove_components(component["members"], recursive=recursive) # Remove data. self._remove_data(component) @@ -447,49 +427,44 @@ def remove_components(self, components, recursive=True): self._deregister_component_in_location(component) # Emit event. 
- component_id = list(ftrack_api.inspection.primary_key( - component - ).values())[0] + component_id = list(ftrack_api.inspection.primary_key(component).values())[ + 0 + ] location_id = list(ftrack_api.inspection.primary_key(self).values())[0] self.session.event_hub.publish( ftrack_api.event.base.Event( topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ) + data=dict(component_id=component_id, location_id=location_id), ), - on_error='ignore' + on_error="ignore", ) def _remove_data(self, component): - '''Remove data associated with *component*.''' + """Remove data associated with *component*.""" if not self.accessor: raise ftrack_api.exception.LocationError( - 'No accessor defined for location {location}.', - details=dict(location=self) + "No accessor defined for location {location}.", + details=dict(location=self), ) try: - self.accessor.remove( - self.get_resource_identifier(component) - ) + self.accessor.remove(self.get_resource_identifier(component)) except ftrack_api.exception.AccessorResourceNotFoundError: # If accessor does not support detecting sequence paths then an # AccessorResourceNotFoundError is raised. For now, if the # component type is 'SequenceComponent' assume success. - if not component.entity_type == 'SequenceComponent': + if not component.entity_type == "SequenceComponent": raise def _deregister_component_in_location(self, component): - '''Deregister *component* from location.''' + """Deregister *component* from location.""" component_id = list(ftrack_api.inspection.primary_key(component).values())[0] location_id = list(ftrack_api.inspection.primary_key(self).values())[0] # TODO: Use session.get for optimisation. component_location = self.session.query( - 'ComponentLocation where component_id is {0} and location_id is ' - '{1}'.format(component_id, location_id) + "ComponentLocation where component_id is {0} and location_id is " + "{1}".format(component_id, location_id) )[0] self.session.delete(component_location) @@ -498,82 +473,80 @@ def _deregister_component_in_location(self, component): self.session.commit() def get_component_availability(self, component): - '''Return availability of *component* in this location as a float.''' - return self.session.get_component_availability( - component, locations=[self] - )[self['id']] + """Return availability of *component* in this location as a float.""" + return self.session.get_component_availability(component, locations=[self])[ + self["id"] + ] def get_component_availabilities(self, components): - '''Return availabilities of *components* in this location. + """Return availabilities of *components* in this location. Return list of float values corresponding to each component. - ''' + """ return [ - availability[self['id']] for availability in - self.session.get_component_availabilities( + availability[self["id"]] + for availability in self.session.get_component_availabilities( components, locations=[self] ) ] def get_resource_identifier(self, component): - '''Return resource identifier for *component*. + """Return resource identifier for *component*. Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the component is not present in this location. - ''' + """ return self.get_resource_identifiers([component])[0] def get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. + """Return resource identifiers for *components*. 
Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any of the components are not present in this location. - ''' + """ resource_identifiers = self._get_resource_identifiers(components) # Optionally decode resource identifier. if self.resource_identifier_transformer: for index, resource_identifier in enumerate(resource_identifiers): - resource_identifiers[index] = ( - self.resource_identifier_transformer.decode( - resource_identifier, - context={'component': components[index]} - ) + resource_identifiers[ + index + ] = self.resource_identifier_transformer.decode( + resource_identifier, context={"component": components[index]} ) return resource_identifiers def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. + """Return resource identifiers for *components*. Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any of the components are not present in this location. - ''' + """ component_ids_mapping = collections.OrderedDict() for component in components: - component_id = list(ftrack_api.inspection.primary_key( - component - ).values())[0] + component_id = list(ftrack_api.inspection.primary_key(component).values())[ + 0 + ] component_ids_mapping[component_id] = component component_locations = self.session.query( - 'select component_id, resource_identifier from ComponentLocation ' - 'where location_id is {0} and component_id in ({1})' - .format( + "select component_id, resource_identifier from ComponentLocation " + "where location_id is {0} and component_id in ({1})".format( list(ftrack_api.inspection.primary_key(self).values())[0], - ', '.join(list(component_ids_mapping.keys())) + ", ".join(list(component_ids_mapping.keys())), ) ) resource_identifiers_map = {} for component_location in component_locations: - resource_identifiers_map[component_location['component_id']] = ( - component_location['resource_identifier'] - ) + resource_identifiers_map[ + component_location["component_id"] + ] = component_location["resource_identifier"] resource_identifiers = [] missing = [] @@ -581,23 +554,19 @@ def _get_resource_identifiers(self, components): if component_id not in resource_identifiers_map: missing.append(component) else: - resource_identifiers.append( - resource_identifiers_map[component_id] - ) + resource_identifiers.append(resource_identifiers_map[component_id]) if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) + raise ftrack_api.exception.ComponentNotInLocationError(missing, self) return resource_identifiers def get_filesystem_path(self, component): - '''Return filesystem path for *component*.''' + """Return filesystem path for *component*.""" return self.get_filesystem_paths([component])[0] def get_filesystem_paths(self, components): - '''Return filesystem paths for *components*.''' + """Return filesystem paths for *components*.""" resource_identifiers = self.get_resource_identifiers(components) filesystem_paths = [] @@ -609,29 +578,29 @@ def get_filesystem_paths(self, components): return filesystem_paths def get_url(self, component): - '''Return url for *component*. + """Return url for *component*. Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if URL could not be determined from *component* or :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if retrieving URL is not supported by the location's accessor. 
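The lookup above resolves resource identifiers in one query and optionally decodes them through the location's `resource_identifier_transformer`; `get_filesystem_path` builds on the same mechanism. A minimal sketch, with assumed names:

```python
import ftrack_api
import ftrack_api.exception

session = ftrack_api.Session()
location = session.query('Location where name is "ftrack.unmanaged"').one()
component = session.query("FileComponent").first()

try:
    path = location.get_filesystem_path(component)
except ftrack_api.exception.ComponentNotInLocationError as error:
    # The exception details carry the components missing from this location.
    print("Missing components:", error.details["components"])
else:
    print("Resolved path:", path)
```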
- ''' + """ resource_identifier = self.get_resource_identifier(component) return self.accessor.get_url(resource_identifier) class MemoryLocationMixin(MixinBaseClass): - '''Represent storage for components. + """Represent storage for components. Unlike a standard location, only store metadata for components in this location in memory rather than persisting to the database. - ''' + """ @property def _cache(self): - '''Return cache.''' + """Return cache.""" try: cache = self.__cache except AttributeError: @@ -640,41 +609,37 @@ def _cache(self): return cache def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location with *resource_identifier*.''' + """Register *component* in location with *resource_identifier*.""" component_id = list(ftrack_api.inspection.primary_key(component).values())[0] self._cache[component_id] = resource_identifier - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. + def _register_components_in_location(self, components, resource_identifiers): + """Register *components* in location against *resource_identifiers*. Indices of *components* and *resource_identifiers* should align. - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): + """ + for component, resource_identifier in zip(components, resource_identifiers): self._register_component_in_location(component, resource_identifier) def _deregister_component_in_location(self, component): - '''Deregister *component* in location.''' + """Deregister *component* in location.""" component_id = list(ftrack_api.inspection.primary_key(component).values())[0] self._cache.pop(component_id) def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. + """Return resource identifiers for *components*. Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any of the referenced components are not present in this location. - ''' + """ resource_identifiers = [] missing = [] for component in components: - component_id = list(ftrack_api.inspection.primary_key( - component - ).values())[0] + component_id = list(ftrack_api.inspection.primary_key(component).values())[ + 0 + ] resource_identifier = self._cache.get(component_id) if resource_identifier is None: missing.append(component) @@ -682,57 +647,54 @@ def _get_resource_identifiers(self, components): resource_identifiers.append(resource_identifier) if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) + raise ftrack_api.exception.ComponentNotInLocationError(missing, self) return resource_identifiers class UnmanagedLocationMixin(MixinBaseClass): - '''Location that does not manage data.''' + """Location that does not manage data.""" def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. + """Manage transfer of *component* data from *source*. *resource_identifier* specifies the identifier to use with this locations accessor. Overridden to have no effect. - ''' + """ return def _remove_data(self, component): - '''Remove data associated with *component*. + """Remove data associated with *component*. Overridden to have no effect. 
- ''' + """ return class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): - '''Special origin location that expects sources as filepaths.''' + """Special origin location that expects sources as filepaths.""" def _get_context(self, component, source): - '''Return context for *component* and *source*.''' + """Return context for *component* and *source*.""" context = {} if source: - context.update(dict( - source_resource_identifier=source - )) + context.update(dict(source_resource_identifier=source)) return context class ServerLocationMixin(MixinBaseClass): - '''Location representing ftrack server. + """Location representing ftrack server. Adds convenience methods to location, specific to ftrack server. - ''' + """ + def get_thumbnail_url(self, component, size=None): - '''Return thumbnail url for *component*. + """Return thumbnail url for *component*. Optionally, specify *size* to constrain the downscaled image to size x size pixels. @@ -741,6 +703,6 @@ def get_thumbnail_url(self, component, size=None): URL could not be determined from *resource_identifier* or :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if retrieving URL is not supported by the location's accessor. - ''' + """ resource_identifier = self.get_resource_identifier(component) return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/source/ftrack_api/entity/note.py b/source/ftrack_api/entity/note.py index 5fadbd9a..6f868d63 100644 --- a/source/ftrack_api/entity/note.py +++ b/source/ftrack_api/entity/note.py @@ -7,99 +7,80 @@ class Note(ftrack_api.entity.base.Entity): - '''Represent a note.''' + """Represent a note.""" - def create_reply( - self, content, author - ): - '''Create a reply with *content* and *author*. + def create_reply(self, content, author): + """Create a reply with *content* and *author*. .. note:: This is a helper method. To create replies manually use the standard :meth:`Session.create` method. - ''' - reply = self.session.create( - 'Note', { - 'author': author, - 'content': content - } - ) + """ + reply = self.session.create("Note", {"author": author, "content": content}) - self['replies'].append(reply) + self["replies"].append(reply) return reply class CreateNoteMixin(object): - '''Mixin to add create_note method on entity class.''' + """Mixin to add create_note method on entity class.""" - def create_note( - self, content, author, recipients=None, category=None, labels=None - ): - '''Create note with *content*, *author*. + def create_note(self, content, author, recipients=None, category=None, labels=None): + """Create note with *content*, *author*. NoteLabels can be set by including *labels*. Note category can be set by including *category*. - + *recipients* can be specified as a list of user or group instances. - ''' - note_label_support = 'NoteLabel' in self.session.types + """ + note_label_support = "NoteLabel" in self.session.types if not labels: labels = [] if labels and not note_label_support: raise ValueError( - 'NoteLabel is not supported by the current server version.' + "NoteLabel is not supported by the current server version." ) if category and labels: - raise ValueError( - 'Both category and labels cannot be set at the same time.' 
- ) + raise ValueError("Both category and labels cannot be set at the same time.") if not recipients: recipients = [] - data = { - 'content': content, - 'author': author - } + data = {"content": content, "author": author} if category: if note_label_support: labels = [category] warnings.warn( - 'category argument will be removed in an upcoming version, ' - 'please use labels instead.', - PendingDeprecationWarning + "category argument will be removed in an upcoming version, " + "please use labels instead.", + PendingDeprecationWarning, ) else: - data['category_id'] = category['id'] + data["category_id"] = category["id"] - note = self.session.create('Note', data) + note = self.session.create("Note", data) - self['notes'].append(note) + self["notes"].append(note) for resource in recipients: - recipient = self.session.create('Recipient', { - 'note_id': note['id'], - 'resource_id': resource['id'] - }) + recipient = self.session.create( + "Recipient", {"note_id": note["id"], "resource_id": resource["id"]} + ) - note['recipients'].append(recipient) + note["recipients"].append(recipient) for label in labels: self.session.create( - 'NoteLabelLink', - { - 'label_id': label['id'], - 'note_id': note['id'] - } + "NoteLabelLink", {"label_id": label["id"], "note_id": note["id"]} ) return note diff --git a/source/ftrack_api/entity/project_schema.py b/source/ftrack_api/entity/project_schema.py index ec6db7c0..9afc7ca4 100644 --- a/source/ftrack_api/entity/project_schema.py +++ b/source/ftrack_api/entity/project_schema.py @@ -5,90 +5,78 @@ class ProjectSchema(ftrack_api.entity.base.Entity): - '''Class representing ProjectSchema.''' + """Class representing ProjectSchema.""" def get_statuses(self, schema, type_id=None): - '''Return statuses for *schema* and optional *type_id*. + """Return statuses for *schema* and optional *type_id*. *type_id* is the id of the Type for a TypedContext and can be used to get statuses where the workflow has been overridden. - ''' + """ # Task has overrides and need to be handled separately. 
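The note helpers reformatted above (`create_note` and `Note.create_reply`) create the `Note`, its `Recipient` rows and any `NoteLabelLink` rows but leave committing to the caller. A sketch of both, assuming a server with `NoteLabel` support; the username and label name are hypothetical:

```python
import ftrack_api

session = ftrack_api.Session()

task = session.query("Task").first()
author = session.query('User where username is "jane.doe"').one()  # hypothetical user
label = session.query('NoteLabel where name is "Review"').first()  # needs NoteLabel support

note = task.create_note("Please review the latest version.", author, labels=[label])

# Replies use the helper shown above and are appended to note["replies"].
reply = note.create_reply("Looks good to me.", author)
session.commit()
```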
- if schema == 'Task': + if schema == "Task": if type_id is not None: - overrides = self['_overrides'] + overrides = self["_overrides"] for override in overrides: - if override['type_id'] == type_id: - return override['workflow_schema']['statuses'][:] + if override["type_id"] == type_id: + return override["workflow_schema"]["statuses"][:] - return self['_task_workflow']['statuses'][:] + return self["_task_workflow"]["statuses"][:] - elif schema == 'AssetVersion': - return self['_version_workflow']['statuses'][:] + elif schema == "AssetVersion": + return self["_version_workflow"]["statuses"][:] else: try: EntityTypeClass = self.session.types[schema] except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) + raise ValueError("Schema {0} does not exist.".format(schema)) - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) + object_type_id_attribute = EntityTypeClass.attributes.get("object_type_id") try: object_type_id = object_type_id_attribute.default_value except AttributeError: - raise ValueError( - 'Schema {0} does not have statuses.'.format(schema) - ) + raise ValueError("Schema {0} does not have statuses.".format(schema)) - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: + for _schema in self["_schemas"]: + if _schema["type_id"] == object_type_id: result = self.session.query( - 'select task_status from SchemaStatus ' - 'where schema_id is {0}'.format(_schema['id']) + "select task_status from SchemaStatus " + "where schema_id is {0}".format(_schema["id"]) ) - return [ - schema_type['task_status'] for schema_type in result - ] + return [schema_type["task_status"] for schema_type in result] raise ValueError( - 'No valid statuses were found for schema {0}.'.format(schema) + "No valid statuses were found for schema {0}.".format(schema) ) def get_types(self, schema): - '''Return types for *schema*.''' + """Return types for *schema*.""" # Task need to be handled separately. 
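As `get_statuses` above shows, Task statuses are resolved through per-type workflow overrides while other schemas go through `SchemaStatus` rows; `get_types`, reformatted next, follows the same pattern. A brief sketch of querying a project's schema (the queried project is an illustrative assumption):

```python
import ftrack_api

session = ftrack_api.Session()

project = session.query("Project").first()
schema = project["project_schema"]

# Pass a type id to pick up any workflow override for that task type.
task_type = schema.get_types("Task")[0]
statuses = schema.get_statuses("Task", task_type["id"])
print([status["name"] for status in statuses])
```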
- if schema == 'Task': - return self['_task_type_schema']['types'][:] + if schema == "Task": + return self["_task_type_schema"]["types"][:] else: try: EntityTypeClass = self.session.types[schema] except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) + raise ValueError("Schema {0} does not exist.".format(schema)) - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) + object_type_id_attribute = EntityTypeClass.attributes.get("object_type_id") try: object_type_id = object_type_id_attribute.default_value except AttributeError: - raise ValueError( - 'Schema {0} does not have types.'.format(schema) - ) + raise ValueError("Schema {0} does not have types.".format(schema)) - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: + for _schema in self["_schemas"]: + if _schema["type_id"] == object_type_id: result = self.session.query( - 'select task_type from SchemaType ' - 'where schema_id is {0}'.format(_schema['id']) + "select task_type from SchemaType " + "where schema_id is {0}".format(_schema["id"]) ) - return [schema_type['task_type'] for schema_type in result] + return [schema_type["task_type"] for schema_type in result] - raise ValueError( - 'No valid types were found for schema {0}.'.format(schema) - ) + raise ValueError("No valid types were found for schema {0}.".format(schema)) diff --git a/source/ftrack_api/entity/user.py b/source/ftrack_api/entity/user.py index c0da4569..882830e6 100644 --- a/source/ftrack_api/entity/user.py +++ b/source/ftrack_api/entity/user.py @@ -9,10 +9,10 @@ class User(ftrack_api.entity.base.Entity): - '''Represent a user.''' + """Represent a user.""" - def start_timer(self, context=None, comment='', name=None, force=False): - '''Start a timer for *context* and return it. + def start_timer(self, context=None, comment="", name=None, force=False): + """Start a timer for *context* and return it. *force* can be used to automatically stop an existing timer and create a timelog for it. If you need to get access to the created timelog, use @@ -27,30 +27,30 @@ def start_timer(self, context=None, comment='', name=None, force=False): :class:`ftrack_api.exception.NotUniqueError` exception if a timer is already running. - ''' + """ if force: try: self.stop_timer() except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Failed to stop existing timer.') + self.logger.debug("Failed to stop existing timer.") - timer = self.session.create('Timer', { - 'user': self, - 'context': context, - 'name': name, - 'comment': comment - }) + timer = self.session.create( + "Timer", + {"user": self, "context": context, "name": name, "comment": comment}, + ) # Commit the new timer and try to catch any error that indicate another # timelog already exists and inform the user about it. try: self.session.commit() except ftrack_api.exception.ServerError as error: - if 'DuplicateEntryError' in str(error): + if "DuplicateEntryError" in str(error): raise ftrack_api.exception.NotUniqueError( - ('Failed to start a timelog for user with id: {0}, it is ' - 'likely that a timer is already running. Either use ' - 'force=True or stop the timer first.').format(self['id']) + ( + "Failed to start a timelog for user with id: {0}, it is " + "likely that a timer is already running. Either use " + "force=True or stop the timer first." + ).format(self["id"]) ) else: # Reraise the error as it might be something unrelated. 
@@ -59,7 +59,7 @@ def start_timer(self, context=None, comment='', name=None, force=False): return timer def stop_timer(self): - '''Stop the current timer and return a timelog created from it. + """Stop the current timer and return a timelog created from it. If a timer is not running, a :exc:`ftrack_api.exception.NoResultFoundError` exception will be @@ -69,39 +69,42 @@ This method will automatically commit the changes. - ''' + """ timer = self.session.query( - 'Timer where user_id = "{0}"'.format(self['id']) + 'Timer where user_id = "{0}"'.format(self["id"]) ).one() # If the server is running in the same timezone as the local # timezone, we remove the TZ offset to get the correct duration. is_timezone_support_enabled = self.session.server_information.get( - 'is_timezone_support_enabled', None + "is_timezone_support_enabled", None ) if is_timezone_support_enabled is None: self.logger.warning( - 'Could not identify if server has timezone support enabled. ' - 'Will assume server is running in UTC.' + "Could not identify if server has timezone support enabled. " + "Will assume server is running in UTC." ) is_timezone_support_enabled = True if is_timezone_support_enabled: now = arrow.now() else: - now = arrow.now().replace(tzinfo='utc') + now = arrow.now().replace(tzinfo="utc") - delta = now - timer['start'] + delta = now - timer["start"] duration = delta.days * 24 * 60 * 60 + delta.seconds - timelog = self.session.create('Timelog', { - 'user_id': timer['user_id'], - 'context_id': timer['context_id'], - 'comment': timer['comment'], - 'start': timer['start'], - 'duration': duration, - 'name': timer['name'] - }) + timelog = self.session.create( + "Timelog", + { + "user_id": timer["user_id"], + "context_id": timer["context_id"], + "comment": timer["comment"], + "start": timer["start"], + "duration": duration, + "name": timer["name"], + }, + ) self.session.delete(timer) self.session.commit() @@ -109,16 +112,13 @@ def stop_timer(self): return timelog def send_invite(self): - '''Send a invation email to the user''' + """Send an invitation email to the user.""" + + self.session.send_user_invite(self) - self.session.send_user_invite( - self - ) def reset_api_key(self): - '''Reset the users api key.''' + """Reset the user's API key.""" - response = self.session.reset_remote( - 'api_key', entity=self - ) + response = self.session.reset_remote("api_key", entity=self) - return response['api_key'] + return response["api_key"] diff --git a/source/ftrack_api/event/base.py b/source/ftrack_api/event/base.py index 888da911..733b8ca1 100644 --- a/source/ftrack_api/event/base.py +++ b/source/ftrack_api/event/base.py @@ -7,11 +7,19 @@ class Event(collections_abc.MutableMapping): - '''Represent a single event.''' - - def __init__(self, topic, id=None, data=None, sent=None, - source=None, target='', in_reply_to_event=None): - '''Initialise event. + """Represent a single event.""" + + def __init__( + self, + topic, + id=None, + data=None, + sent=None, + source=None, + target="", + in_reply_to_event=None, + ): + """Initialise event. *topic* is the required topic for the event. It can use a dotted notation to demarcate groupings. For example, 'ftrack.update'. @@ -38,7 +46,7 @@ def __init__(self, topic, id=None, data=None, sent=None, *in_reply_to_event* is used when replying to an event and should contain the unique id of the event being replied to.
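The timer methods reformatted above manage a single `Timer` per user: `force=True` folds an already-running timer into a timelog before starting a new one, and `stop_timer` computes the duration with the timezone handling shown. A usage sketch; the username is a hypothetical assumption:

```python
import ftrack_api

session = ftrack_api.Session()

user = session.query('User where username is "jane.doe"').one()  # hypothetical username
task = session.query("Task").first()

# force=True stops any running timer first, creating a timelog for it.
user.start_timer(task, comment="Lighting pass", force=True)

# ...later, stop the timer and keep the generated timelog.
timelog = user.stop_timer()
print(timelog["duration"], "seconds")
```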
- ''' + """ super(Event, self).__init__() self._data = dict( id=id or uuid.uuid4().hex, @@ -47,40 +55,38 @@ def __init__(self, topic, id=None, data=None, sent=None, sent=sent, source=source or {}, target=target, - in_reply_to_event=in_reply_to_event + in_reply_to_event=in_reply_to_event, ) self._stopped = False def stop(self): - '''Stop further processing of this event.''' + """Stop further processing of this event.""" self._stopped = True def is_stopped(self): - '''Return whether event has been stopped.''' + """Return whether event has been stopped.""" return self._stopped def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) + """Return string representation.""" + return "<{0} {1}>".format(self.__class__.__name__, str(self._data)) def __getitem__(self, key): - '''Return value for *key*.''' + """Return value for *key*.""" return self._data[key] def __setitem__(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" self._data[key] = value def __delitem__(self, key): - '''Remove *key*.''' + """Remove *key*.""" del self._data[key] def __iter__(self): - '''Iterate over all keys.''' + """Iterate over all keys.""" return iter(self._data) def __len__(self): - '''Return count of keys.''' + """Return count of keys.""" return len(self._data) diff --git a/source/ftrack_api/event/expression.py b/source/ftrack_api/event/expression.py index b8dae6cf..05ca47bc 100644 --- a/source/ftrack_api/event/expression.py +++ b/source/ftrack_api/event/expression.py @@ -6,9 +6,20 @@ from builtins import object from operator import eq, ne, ge, le, gt, lt -from pyparsing import (Group, Word, CaselessKeyword, Forward, - FollowedBy, Suppress, oneOf, OneOrMore, Optional, - alphanums, quotedString, removeQuotes) +from pyparsing import ( + Group, + Word, + CaselessKeyword, + Forward, + FollowedBy, + Suppress, + oneOf, + OneOrMore, + Optional, + alphanums, + quotedString, + removeQuotes, +) import ftrack_api.exception @@ -18,216 +29,194 @@ class Parser(object): - '''Parse string based expression into :class:`Expression` instance.''' + """Parse string based expression into :class:`Expression` instance.""" def __init__(self): - '''Initialise parser.''' - self._operators = { - '=': eq, - '!=': ne, - '>=': ge, - '<=': le, - '>': gt, - '<': lt - } + """Initialise parser.""" + self._operators = {"=": eq, "!=": ne, ">=": ge, "<=": le, ">": gt, "<": lt} self._parser = self._construct_parser() super(Parser, self).__init__() def _construct_parser(self): - '''Construct and return parser.''' - field = Word(alphanums + '_.') + """Construct and return parser.""" + field = Word(alphanums + "_.") operator = oneOf(list(self._operators.keys())) - value = Word(alphanums + '-_,./*@+') - quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) + value = Word(alphanums + "-_,./*@+") + quoted_value = quotedString("quoted_value").setParseAction(removeQuotes) - condition = Group( - field + operator + (quoted_value | value) - )('condition') + condition = Group(field + operator + (quoted_value | value))("condition") - not_ = Optional(Suppress(CaselessKeyword('not')))('not') - and_ = Suppress(CaselessKeyword('and'))('and') - or_ = Suppress(CaselessKeyword('or'))('or') + not_ = Optional(Suppress(CaselessKeyword("not")))("not") + and_ = Suppress(CaselessKeyword("and"))("and") + or_ = Suppress(CaselessKeyword("or"))("or") expression = Forward() - parenthesis = Suppress('(') + expression + Suppress(')') + parenthesis = Suppress("(") + 
expression + Suppress(")") previous = condition | parenthesis for conjunction in (not_, and_, or_): current = Forward() if conjunction in (and_, or_): - conjunction_expression = ( - FollowedBy(previous + conjunction + previous) - + Group( - previous + OneOrMore(conjunction + previous) - )(conjunction.resultsName) + conjunction_expression = FollowedBy( + previous + conjunction + previous + ) + Group(previous + OneOrMore(conjunction + previous))( + conjunction.resultsName ) - elif conjunction in (not_, ): - conjunction_expression = ( - FollowedBy(conjunction.expr + current) - + Group(conjunction + current)(conjunction.resultsName) - ) + elif conjunction in (not_,): + conjunction_expression = FollowedBy(conjunction.expr + current) + Group( + conjunction + current + )(conjunction.resultsName) else: # pragma: no cover - raise ValueError('Unrecognised conjunction.') + raise ValueError("Unrecognised conjunction.") - current <<= (conjunction_expression | previous) + current <<= conjunction_expression | previous previous = current expression <<= previous - return expression('expression') + return expression("expression") def parse(self, expression): - '''Parse string *expression* into :class:`Expression`. + """Parse string *expression* into :class:`Expression`. Raise :exc:`ftrack_api.exception.ParseError` if *expression* could not be parsed. - ''' + """ result = None expression = expression.strip() if expression: try: - result = self._parser.parseString( - expression, parseAll=True - ) + result = self._parser.parseString(expression, parseAll=True) except Exception as error: raise ftrack_api.exception.ParseError( - 'Failed to parse: {0}. {1}'.format(expression, error) + "Failed to parse: {0}. {1}".format(expression, error) ) return self._process(result) def _process(self, result): - '''Process *result* using appropriate method. + """Process *result* using appropriate method. Method called is determined by the name of the result. 
- ''' - method_name = '_process_{0}'.format(result.getName()) + """ + method_name = "_process_{0}".format(result.getName()) method = getattr(self, method_name) return method(result) def _process_expression(self, result): - '''Process *result* as expression.''' + """Process *result* as expression.""" return self._process(result[0]) def _process_not(self, result): - '''Process *result* as NOT operation.''' + """Process *result* as NOT operation.""" return Not(self._process(result[0])) def _process_and(self, result): - '''Process *result* as AND operation.''' + """Process *result* as AND operation.""" return All([self._process(entry) for entry in result]) def _process_or(self, result): - '''Process *result* as OR operation.''' + """Process *result* as OR operation.""" return Any([self._process(entry) for entry in result]) def _process_condition(self, result): - '''Process *result* as condition.''' + """Process *result* as condition.""" key, operator, value = result return Condition(key, self._operators[operator], value) def _process_quoted_value(self, result): - '''Process *result* as quoted value.''' + """Process *result* as quoted value.""" return result class Expression(object): - '''Represent a structured expression to test candidates against.''' + """Represent a structured expression to test candidates against.""" def __str__(self): - '''Return string representation.''' - return '<{0}>'.format(self.__class__.__name__) + """Return string representation.""" + return "<{0}>".format(self.__class__.__name__) def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' + """Return whether *candidate* satisfies this expression.""" return True class All(Expression): - '''Match candidate that matches all of the specified expressions. + """Match candidate that matches all of the specified expressions. .. note:: If no expressions are supplied then will always match. - ''' + """ def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' + """Initialise with list of *expressions* to match against.""" self._expressions = expressions or [] super(All, self).__init__() def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) + """Return string representation.""" + return "<{0} [{1}]>".format( + self.__class__.__name__, " ".join(map(str, self._expressions)) ) def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return all([ - expression.match(candidate) for expression in self._expressions - ]) + """Return whether *candidate* satisfies this expression.""" + return all([expression.match(candidate) for expression in self._expressions]) class Any(Expression): - '''Match candidate that matches any of the specified expressions. + """Match candidate that matches any of the specified expressions. .. note:: If no expressions are supplied then will never match. 
- ''' + """ def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' + """Initialise with list of *expressions* to match against.""" self._expressions = expressions or [] super(Any, self).__init__() def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) + """Return string representation.""" + return "<{0} [{1}]>".format( + self.__class__.__name__, " ".join(map(str, self._expressions)) ) def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return any([ - expression.match(candidate) for expression in self._expressions - ]) + """Return whether *candidate* satisfies this expression.""" + return any([expression.match(candidate) for expression in self._expressions]) class Not(Expression): - '''Negate expression.''' + """Negate expression.""" def __init__(self, expression): - '''Initialise with *expression* to negate.''' + """Initialise with *expression* to negate.""" self._expression = expression super(Not, self).__init__() def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, - self._expression - ) + """Return string representation.""" + return "<{0} {1}>".format(self.__class__.__name__, self._expression) def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' + """Return whether *candidate* satisfies this expression.""" return not self._expression.match(candidate) class Condition(Expression): - '''Represent condition.''' + """Represent condition.""" def __init__(self, key, operator, value): - '''Initialise condition. + """Initialise condition. *key* is the key to check on the data when matching. It can be a nested key represented by dots. For example, 'data.eventType' would attempt to @@ -241,32 +230,32 @@ def __init__(self, key, operator, value): that any values matching the substring portion are valid when matching equality only. 
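The grammar constructed above compiles strings such as `topic=ftrack.update` into nested `All`, `Any`, `Not` and `Condition` objects whose `match` walks dotted keys into the candidate mapping. A small sketch of the parser in use, with a plain dict standing in for an event:

```python
from ftrack_api.event.expression import Parser

parser = Parser()
candidate = {"topic": "ftrack.update", "data": {"entityType": "task"}}

expression = parser.parse("topic=ftrack.update and data.entityType=task")
print(expression.match(candidate))  # True

# A trailing wildcard matches any value sharing the prefix (equality only),
# per the Condition docstring above.
print(parser.parse("topic=ftrack.*").match(candidate))  # True

# Unparseable input raises ftrack_api.exception.ParseError.
```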
- ''' + """ self._key = key self._operator = operator self._value = value - self._wildcard = '*' + self._wildcard = "*" self._operatorMapping = { - eq: '=', - ne: '!=', - ge: '>=', - le: '<=', - gt: '>', - lt: '<' + eq: "=", + ne: "!=", + ge: ">=", + le: "<=", + gt: ">", + lt: "<", } def __str__(self): - '''Return string representation.''' - return '<{0} {1}{2}{3}>'.format( + """Return string representation.""" + return "<{0} {1}{2}{3}>".format( self.__class__.__name__, self._key, self._operatorMapping.get(self._operator, self._operator), - self._value + self._value, ) def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - key_parts = self._key.split('.') + """Return whether *candidate* satisfies this expression.""" + key_parts = self._key.split(".") try: value = candidate diff --git a/source/ftrack_api/event/hub.py b/source/ftrack_api/event/hub.py index fddca01c..937452a9 100644 --- a/source/ftrack_api/event/hub.py +++ b/source/ftrack_api/event/hub.py @@ -32,40 +32,44 @@ from ftrack_api.logging import LazyLogMessage as L -SocketIoSession = collections.namedtuple('SocketIoSession', [ - 'id', - 'heartbeatTimeout', - 'supportedTransports', -]) - - -ServerDetails = collections.namedtuple('ServerDetails', [ - 'scheme', - 'hostname', - 'port', -]) +SocketIoSession = collections.namedtuple( + "SocketIoSession", + [ + "id", + "heartbeatTimeout", + "supportedTransports", + ], +) + + +ServerDetails = collections.namedtuple( + "ServerDetails", + [ + "scheme", + "hostname", + "port", + ], +) class EventHub(object): - '''Manage routing of events.''' + """Manage routing of events.""" def __init__(self, server_url, api_user, api_key, headers=None, cookies=None): - '''Initialise hub, connecting to ftrack *server_url*. + """Initialise hub, connecting to ftrack *server_url*. *api_user* is the user to authenticate as and *api_key* is the API key to authenticate with. - + *cookies* should be an optional mapping (dict) of key-value pairs specifying custom cookies that we need to pass in alongside the requests to the server. *headers* should be an optional mapping (dict) of key-value pairs specifying custom headers that we need to pass in alongside the requests to the server. - ''' + """ super(EventHub, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) self.id = uuid.uuid4().hex self._connection = None @@ -81,7 +85,7 @@ def __init__(self, server_url, api_user, api_key, headers=None, cookies=None): self._event_queue = queue.Queue() self._event_send_queue = queue.Queue() - self._event_namespace = 'ftrack.event' + self._event_namespace = "ftrack.event" self._expression_parser = ftrack_api.event.expression.Parser() # Track if a connection has been initialised. @@ -94,14 +98,14 @@ def __init__(self, server_url, api_user, api_key, headers=None, cookies=None): # Mapping of Socket.IO codes to meaning. 
self._code_name_mapping = { - '0': 'disconnect', - '1': 'connect', - '2': 'heartbeat', - '3': 'message', - '4': 'json', - '5': 'event', - '6': 'acknowledge', - '7': 'error' + "0": "disconnect", + "1": "connect", + "2": "heartbeat", + "3": "message", + "4": "json", + "5": "event", + "6": "acknowledge", + "7": "error", } self._code_name_mapping.update( dict((name, code) for code, name in list(self._code_name_mapping.items())) @@ -114,21 +118,19 @@ def __init__(self, server_url, api_user, api_key, headers=None, cookies=None): # Parse server URL and store server details. url_parse_result = urllib.parse.urlparse(self._server_url) if not url_parse_result.scheme: - raise ValueError('Could not determine scheme from server url.') + raise ValueError("Could not determine scheme from server url.") if not url_parse_result.hostname: - raise ValueError('Could not determine hostname from server url.') + raise ValueError("Could not determine hostname from server url.") self.server = ServerDetails( - url_parse_result.scheme, - url_parse_result.hostname, - url_parse_result.port + url_parse_result.scheme, url_parse_result.hostname, url_parse_result.port ) def _validate_mapping(mapping): - '''Validate mapping is a mapping type and return as dict.''' + """Validate mapping is a mapping type and return as dict.""" if not isinstance(mapping, collections_abc.Mapping): - raise TypeError('Expected mapping, got {0!r}.'.format(mapping)) + raise TypeError("Expected mapping, got {0!r}.".format(mapping)) return dict(mapping) @@ -136,48 +138,42 @@ def _validate_mapping(mapping): self._headers = _validate_mapping(headers or {}) def get_server_url(self): - '''Return URL to server.''' - return '{0}://{1}'.format( - self.server.scheme, self.get_network_location() - ) + """Return URL to server.""" + return "{0}://{1}".format(self.server.scheme, self.get_network_location()) def get_network_location(self): - '''Return network location part of url (hostname with optional port).''' + """Return network location part of url (hostname with optional port).""" if self.server.port: - return '{0}:{1}'.format(self.server.hostname, self.server.port) + return "{0}:{1}".format(self.server.hostname, self.server.port) else: return self.server.hostname @property def secure(self): - '''Return whether secure connection used.''' - return self.server.scheme == 'https' + """Return whether secure connection used.""" + return self.server.scheme == "https" def init_connection(self): - '''If the connection is not handled synchronously the connection may be marked - as initialized to allow for published events to be queued. ''' + """If the connection is not handled synchronously the connection may be marked + as initialized to allow for published events to be queued.""" - self.logger.debug( - 'Connection initialized,' - ) + self.logger.debug("Connection initialized.") self._connection_initialised = True def connect(self): - '''Initialise connection to server. + """Initialise connection to server. Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already connected or connection fails. - ''' + """ if not self._connection_initialised: # Update tracking flag for connection. self.init_connection() if self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Already connected.' - ) + raise ftrack_api.exception.EventHubConnectionError("Already connected.") # Reset flag tracking whether disconnection was intentional.
self._intentional_disconnect = False @@ -186,21 +182,16 @@ def connect(self): # Connect to socket.io server using websocket transport. session = self._get_socket_io_session() - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) + if "websocket" not in session.supportedTransports: + raise ValueError("Server does not support websocket sessions.") - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( + scheme = "wss" if self.secure else "ws" + url = "{0}://{1}/socket.io/1/websocket/{2}".format( scheme, self.get_network_location(), session.id ) # Select highest available protocol for websocket connection. - ssl_protocols = [ - 'PROTOCOL_TLS', - 'PROTOCOL_TLSv1_2' - ] + ssl_protocols = ["PROTOCOL_TLS", "PROTOCOL_TLSv1_2"] available_ssl_protocol = None @@ -208,9 +199,10 @@ def connect(self): if hasattr(ssl, ssl_protocol): available_ssl_protocol = getattr(ssl, ssl_protocol) self.logger.debug( - 'Using protocol {} to connect to websocket.'.format( + "Using protocol {} to connect to websocket.".format( ssl_protocol - )) + ) + ) break # timeout is set to 60 seconds to avoid the issue where the socket @@ -220,31 +212,30 @@ def connect(self): # More information on how the timeout works can be found here: # https://docs.python.org/2/library/socket.html#socket.socket.setblocking self._connection = websocket.create_connection( - url, timeout=60, sslopt={"ssl_version": available_ssl_protocol}, - enable_multithread= True, header=self._headers, - cookie=';'.join(['{0}={1}'.format(x, self._cookies[x]) for x in self._cookies.keys()]) + url, + timeout=60, + sslopt={"ssl_version": available_ssl_protocol}, + enable_multithread=True, + header=self._headers, + cookie=";".join( + [ + "{0}={1}".format(x, self._cookies[x]) + for x in self._cookies.keys() + ] + ), ) except Exception as error: error_message = ( - 'Failed to connect to event server at {server_url} with ' + "Failed to connect to event server at {server_url} with " 'error: "{error}".' ) - error_details = { - 'error': str(error), - 'server_url': self.get_server_url() - } + error_details = {"error": str(error), "server_url": self.get_server_url()} - self.logger.debug( - L( - error_message, **error_details - ), - exc_info=1 - ) + self.logger.debug(L(error_message, **error_details), exc_info=1) raise ftrack_api.exception.EventHubConnectionError( - error_message, - details=error_details + error_message, details=error_details ) # Start background processing thread. @@ -257,11 +248,9 @@ def connect(self): # duplicate subscriber error if EventHub.subscribe was called here. try: self._add_subscriber( - 'topic=ftrack.meta.reply', + "topic=ftrack.meta.reply", self._handle_reply, - subscriber=dict( - id=self.id - ) + subscriber=dict(id=self.id), ) except ftrack_api.exception.NotUniqueError: pass @@ -281,11 +270,11 @@ def connect(self): @property def connected(self): - '''Return if connected.''' + """Return if connected.""" return self._connection is not None and self._connection.connected def disconnect(self, unsubscribe=True, reconnect=False): - '''Disconnect from server. + """Disconnect from server. Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not currently connected. @@ -296,10 +285,10 @@ def disconnect(self, unsubscribe=True, reconnect=False): If *reconnect* is True we do not set connection_initialized to False so that we may queue up messages that are published while disconnected. 
- ''' + """ if not self.connected: raise ftrack_api.exception.EventHubConnectionError( - 'Not currently connected.' + "Not currently connected." ) else: @@ -316,7 +305,7 @@ def disconnect(self, unsubscribe=True, reconnect=False): # Unsubscribe all subscribers. if unsubscribe: for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) + self.unsubscribe(subscriber.metadata["id"]) # Now disconnect. self._connection.close() @@ -331,7 +320,7 @@ def disconnect(self, unsubscribe=True, reconnect=False): self._processor_thread.join(self._wait_timeout) def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. + """Reconnect to server. Make *attempts* number of attempts with *delay* in seconds between each attempt. @@ -344,19 +333,15 @@ Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to reconnect. - ''' + """ try: - self.disconnect( - unsubscribe=False, reconnect=True - ) + self.disconnect(unsubscribe=False, reconnect=True) except ftrack_api.exception.EventHubConnectionError: pass for attempt in range(attempts): - self.logger.debug(L( - 'Reconnect attempt {0} of {1}', attempt, attempts - )) + self.logger.debug(L("Reconnect attempt {0} of {1}", attempt, attempts)) # Silence logging temporarily to avoid lots of failed connection # related information. @@ -375,26 +360,27 @@ if not self.connected: raise ftrack_api.exception.EventHubConnectionError( - 'Failed to reconnect to event server at {0} after {1} attempts.' - .format(self.get_server_url(), attempts) + "Failed to reconnect to event server at {0} after {1} attempts.".format( + self.get_server_url(), attempts + ) ) def wait(self, duration=None): - '''Wait for events and handle as they arrive. + """Wait for events and handle as they arrive. If *duration* is specified, then only process events until duration is reached. *duration* is in seconds though float values can be used for smaller values. - ''' + """ if not self._connection_initialised: raise ftrack_api.exception.EventHubConnectionError( - 'Event hub does not have a connection to the event server and ' - 'will therefore only be able to receive syncronous events.' - 'Please see http://ftrack-python-api.rtd.ftrack.com/en/stable/' - 'release/migration.html#default-behavior-for-connecting-to-event-hub' - ' for further information.' + "Event hub does not have a connection to the event server and " + "will therefore only be able to receive synchronous events. " + "Please see http://ftrack-python-api.rtd.ftrack.com/en/stable/" + "release/migration.html#default-behavior-for-connecting-to-event-hub" + " for further information." ) started = time.time() @@ -408,7 +394,7 @@ def wait(self, duration=None): self._handle(event) # Additional special processing of events. - if event['topic'] == 'ftrack.meta.disconnected': + if event["topic"] == "ftrack.meta.disconnected": break if duration is not None: @@ -416,19 +402,19 @@ break def get_subscriber_by_identifier(self, identifier): - '''Return subscriber with matching *identifier*. + """Return subscriber with matching *identifier*. Return None if no subscriber with *identifier* found. - ''' + """ for subscriber in self._subscribers[:]: - if subscriber.metadata.get('id') == identifier: + if subscriber.metadata.get("id") == identifier: return subscriber return None def subscribe(self, subscription, callback, subscriber=None, priority=100): - '''Register *callback* for *subscription*.
+ """Register *callback* for *subscription*. A *subscription* is a string that can specify in detail which events the callback should receive. The filtering is applied against each event @@ -471,27 +457,26 @@ def subscribe(self, subscription, callback, subscriber=None, priority=100): Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with the same identifier already exists. - ''' + """ # Add subscriber locally. - subscriber = self._add_subscriber( - subscription, callback, subscriber, priority - ) + subscriber = self._add_subscriber(subscription, callback, subscriber, priority) # Notify server now if possible. try: self._notify_server_about_subscriber(subscriber) except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) + self.logger.debug( + L( + "Failed to notify server about new subscriber {0} " + "as server not currently reachable.", + subscriber.metadata["id"], + ) + ) - return subscriber.metadata['id'] + return subscriber.metadata["id"] - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. + def _add_subscriber(self, subscription, callback, subscriber=None, priority=100): + """Add subscriber locally. See :meth:`subscribe` for argument descriptions. @@ -500,28 +485,27 @@ def _add_subscriber( Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with the same identifier already exists. - ''' + """ if subscriber is None: subscriber = {} - subscriber.setdefault('id', uuid.uuid4().hex) + subscriber.setdefault("id", uuid.uuid4().hex) # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) + existing_subscriber = self.get_subscriber_by_identifier(subscriber["id"]) if existing_subscriber is not None: raise ftrack_api.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' 
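As the `subscribe` helper above shows, subscribers are registered locally first and the server is then notified on a best-effort basis, with the subscriber identifier returned for later use with `unsubscribe`. A minimal sketch; `auto_connect_event_hub=True` is the standard way to get a connected hub and is assumed here:

```python
import ftrack_api

session = ftrack_api.Session(auto_connect_event_hub=True)

def on_update(event):
    """Print data for each ftrack.update event."""
    print(event["data"])

subscriber_id = session.event_hub.subscribe("topic=ftrack.update", on_update)

# Process incoming events for ten seconds, then clean up.
session.event_hub.wait(duration=10)
session.event_hub.unsubscribe(subscriber_id)
```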
- .format(subscriber['id']) + "Subscriber with identifier {0} already exists.".format( + subscriber["id"] + ) ) subscriber = ftrack_api.event.subscriber.Subscriber( subscription=subscription, callback=callback, metadata=subscriber, - priority=priority + priority=priority, ) self._subscribers.append(subscriber) @@ -529,30 +513,32 @@ def _add_subscriber( return subscriber def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' + """Notify server of new *subscriber*.""" subscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.subscribe', + topic="ftrack.meta.subscribe", data=dict( subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) + subscription=str(subscriber.subscription), + ), ) self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) + subscribe_event, callback=functools.partial(self._on_subscribed, subscriber) ) def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) + """Handle acknowledgement of subscription.""" + if response.get("success") is False: + self.logger.warning( + L( + "Server failed to subscribe subscriber {0}: {1}", + subscriber.metadata["id"], + response.get("message"), + ) + ) def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. + """Unsubscribe subscriber with *subscriber_identifier*. .. note:: @@ -560,65 +546,67 @@ def unsubscribe(self, subscriber_identifier): unsubscription. However, the subscriber will be removed locally regardless. - ''' + """ subscriber = self.get_subscriber_by_identifier(subscriber_identifier) if subscriber is None: raise ftrack_api.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) + "Cannot unsubscribe missing subscriber with identifier {0}".format( + subscriber_identifier + ) ) self._subscribers.pop(self._subscribers.index(subscriber)) # Notify the server if possible. 
unsubscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) + topic="ftrack.meta.unsubscribe", data=dict(subscriber=subscriber.metadata) ) try: self._publish( unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) + callback=functools.partial(self._on_unsubscribed, subscriber), ) except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) + self.logger.debug( + L( + "Failed to notify server to unsubscribe subscriber {0} as " + "server not currently reachable.", + subscriber.metadata["id"], + ) + ) def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) + """Handle acknowledgement of unsubscribing *subscriber*.""" + if response.get("success") is not True: + self.logger.warning( + L( + "Server failed to unsubscribe subscriber {0}: {1}", + subscriber.metadata["id"], + response.get("message"), + ) + ) def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) + """Prepare *event* for sending.""" + event["source"].setdefault("id", self.id) + event["source"].setdefault("user", {"username": self._api_user}) def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. + """Prepare *event* as a reply to another *source_event*. Modify *event*, setting appropriate values to target event correctly as a reply. - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] + """ + event["target"] = "id={0}".format(source_event["source"]["id"]) + event["in_reply_to_event"] = source_event["id"] if source is not None: - event['source'] = source + event["source"] = source - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. + def publish(self, event, synchronous=False, on_reply=None, on_error="raise"): + """Publish *event*. If *synchronous* is specified as True then this method will wait and return a list of results from any called callbacks. @@ -639,34 +627,29 @@ def publish( If *on_error* is set to 'ignore' then errors raised during publish of event will be caught by this method and ignored. - ''' + """ try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) + return self._publish(event, synchronous=synchronous, on_reply=on_reply) except Exception: - if on_error == 'ignore': + if on_error == "ignore": pass else: raise def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. + """Publish a reply event to *source_event* with supplied *data*. If *source* is specified it will be used for the source value of the sent event. 
- ''' - reply_event = ftrack_api.event.base.Event( - 'ftrack.meta.reply', - data=data - ) + """ + reply_event = ftrack_api.event.base.Event("ftrack.meta.reply", data=data) self._prepare_reply_event(reply_event, source_event, source=source) self.publish(reply_event) def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. + """Publish *event*. If *synchronous* is specified as True then this method will wait and return a list of results from any called callbacks. @@ -688,7 +671,7 @@ def _publish(self, event, synchronous=False, callback=None, on_reply=None): Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not currently connected. - ''' + """ # Prepare event adding any relevant additional information. self._prepare_event(event) @@ -705,19 +688,15 @@ def _publish(self, event, synchronous=False, callback=None, on_reply=None): # This could also be a reconnection. - self._event_send_queue.put( - (event, synchronous, callback, on_reply) - ) + self._event_send_queue.put((event, synchronous, callback, on_reply)) self.logger.debug( - 'Connection is still initializing, adding message to ' - 'queue' + "Connection is still initializing, adding message to queue" ) return True raise ftrack_api.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' + "Cannot publish event asynchronously as not connected to server." ) # Use standard callback if none specified. @@ -731,20 +710,16 @@ def _publish(self, event, synchronous=False, callback=None, on_reply=None): # TODO: Add cleanup process that runs after a set duration to # garbage collect old reply callbacks and prevent dictionary # growing too large. - self._reply_callbacks[event['id']] = on_reply + self._reply_callbacks[event["id"]] = on_reply try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) + self._emit_event_packet(self._event_namespace, event, callback=callback) except ftrack_api.exception.EventHubConnectionError: # Connection may have dropped temporarily. Wait a few moments to # see if background thread reconnects automatically. time.sleep(15) - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) + self._emit_event_packet(self._event_namespace, event, callback=callback) except: raise @@ -753,48 +728,52 @@ def _publish(self, event, synchronous=False, callback=None, on_reply=None): # TODO: This behaviour is inconsistent with the failing earlier on # lack of connection and also with the error handling parameter of # EventHub.publish. Consider refactoring. - self.logger.exception(L('Error sending event {0}.', event)) + self.logger.exception(L("Error sending event {0}.", event)) def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) + """Handle acknowledgement of published event.""" + if response.get("success", False) is False: + self.logger.error( + L( + "Server responded with error while publishing event {0}. " + "Error was: {1}", + event, + response.get("message"), + ) + ) def _handle(self, event, synchronous=False): - '''Handle *event*. + """Handle *event*. If *synchronous* is True, do not send any automatic reply events. - ''' + """ # Sort by priority, lower is higher. # TODO: Use a sorted list to avoid sorting each time in order to improve # performance.
- subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) + subscribers = sorted(self._subscribers, key=operator.attrgetter("priority")) results = [] - target = event.get('target', None) + target = event.get("target", None) target_expression = None if target: try: target_expression = self._expression_parser.parse(target) except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) + self.logger.exception( + L( + "Cannot handle event as failed to parse event target " + "information: {0}", + event, + ) + ) return for subscriber in subscribers: # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) + if target_expression is not None and not target_expression.match( + subscriber.metadata ): continue @@ -808,15 +787,13 @@ def _handle(self, event, synchronous=False): response = subscriber.callback(event) results.append(response) except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) + self.logger.exception( + L("Error calling subscriber {0} for event {1}.", subscriber, event) + ) # Automatically publish a non None response as a reply when not in # synchronous mode. if not synchronous: - if response is not None: try: self.publish_reply( @@ -824,38 +801,48 @@ def _handle(self, event, synchronous=False): ) except Exception: - self.logger.exception(L( - 'Error publishing response {0} from subscriber {1} ' - 'for event {2}.', response, subscriber, event - )) + self.logger.exception( + L( + "Error publishing response {0} from subscriber {1} " + "for event {2}.", + response, + subscriber, + event, + ) + ) # Check whether to continue processing topic event. if event.is_stopped(): - self.logger.debug(L( - 'Subscriber {0} stopped event {1}. Will not process ' - 'subsequent subscriber callbacks for this event.', - subscriber, event - )) + self.logger.debug( + L( + "Subscriber {0} stopped event {1}. Will not process " + "subsequent subscriber callbacks for this event.", + subscriber, + event, + ) + ) break return results def _handle_reply(self, event): - '''Handle reply *event*, passing it to any registered callback.''' - callback = self._reply_callbacks.get(event['in_reply_to_event'], None) + """Handle reply *event*, passing it to any registered callback.""" + callback = self._reply_callbacks.get(event["in_reply_to_event"], None) if callback is not None: callback(event) - def subscription(self, subscription, callback, subscriber=None, - priority=100): - '''Return context manager with *callback* subscribed to *subscription*. + def subscription(self, subscription, callback, subscriber=None, priority=100): + """Return context manager with *callback* subscribed to *subscription*. The subscribed callback will be automatically unsubscribed on exit of the context manager. 
- ''' + """ return _SubscriptionContext( - self, subscription, callback, subscriber=subscriber, + self, + subscription, + callback, + subscriber=subscriber, priority=priority, ) @@ -863,17 +850,14 @@ def subscription(self, subscription, callback, subscriber=None, # def _get_socket_io_session(self): - '''Connect to server and retrieve session information.''' - socket_io_url = ( - '{0}://{1}/socket.io/1/' - ).format( - self.server.scheme, - self.get_network_location() + """Connect to server and retrieve session information.""" + socket_io_url = ("{0}://{1}/socket.io/1/").format( + self.server.scheme, self.get_network_location() ) try: req_headers = { - 'ftrack-user': self._api_user, - 'ftrack-api-key': self._api_key + "ftrack-user": self._api_user, + "ftrack-api-key": self._api_key, } if self._headers: req_headers.update(self._headers) @@ -881,102 +865,94 @@ def _get_socket_io_session(self): socket_io_url, headers=req_headers, cookies=self._cookies, - timeout=60 # 60 seconds timeout to recieve errors faster. + timeout=60, # 60 seconds timeout to recieve errors faster. ) except requests.exceptions.Timeout as error: raise ftrack_api.exception.EventHubConnectionError( - 'Timed out connecting to server: {0}.'.format(error) + "Timed out connecting to server: {0}.".format(error) ) except requests.exceptions.SSLError as error: raise ftrack_api.exception.EventHubConnectionError( - 'Failed to negotiate SSL with server: {0}.'.format(error) + "Failed to negotiate SSL with server: {0}.".format(error) ) except requests.exceptions.ConnectionError as error: raise ftrack_api.exception.EventHubConnectionError( - 'Failed to connect to server: {0}.'.format(error) + "Failed to connect to server: {0}.".format(error) ) else: status = response.status_code if status != 200: raise ftrack_api.exception.EventHubConnectionError( - 'Received unexpected status code {0}.'.format(status) + "Received unexpected status code {0}.".format(status) ) # Parse result and return session information. - parts = response.text.split(':') - return SocketIoSession( - parts[0], - parts[1], - parts[3].split(',') - ) + parts = response.text.split(":") + return SocketIoSession(parts[0], parts[1], parts[3].split(",")) def _add_packet_callback(self, callback): - '''Store callback against a new unique packet ID. + """Store callback against a new unique packet ID. Return the unique packet ID. 
- ''' + """ with self._lock: self._unique_packet_id += 1 unique_identifier = self._unique_packet_id self._packet_callbacks[unique_identifier] = callback - return '{0}+'.format(unique_identifier) + return "{0}+".format(unique_identifier) def _pop_packet_callback(self, packet_identifier): - '''Pop and return callback for *packet_identifier*.''' + """Pop and return callback for *packet_identifier*.""" return self._packet_callbacks.pop(packet_identifier) def _emit_event_packet(self, namespace, event, callback): - '''Send *event* packet under *namespace*.''' - data = self._encode( - dict(name=namespace, args=[event]) - ) + """Send *event* packet under *namespace*.""" + data = self._encode(dict(name=namespace, args=[event])) self._send_packet( - self._code_name_mapping['event'], data=data, callback=callback + self._code_name_mapping["event"], data=data, callback=callback ) def _acknowledge_packet(self, packet_identifier, *args): - '''Send acknowledgement of packet with *packet_identifier*.''' - packet_identifier = packet_identifier.rstrip('+') + """Send acknowledgement of packet with *packet_identifier*.""" + packet_identifier = packet_identifier.rstrip("+") data = str(packet_identifier) if args: - data += '+{1}'.format(self._encode(args)) + data += "+{1}".format(self._encode(args)) - self._send_packet(self._code_name_mapping['acknowledge'], data=data) + self._send_packet(self._code_name_mapping["acknowledge"], data=data) - def _send_packet(self, code, data='', callback=None): - '''Send packet via connection.''' - path = '' - packet_identifier = ( - self._add_packet_callback(callback) if callback else '' - ) + def _send_packet(self, code, data="", callback=None): + """Send packet via connection.""" + path = "" + packet_identifier = self._add_packet_callback(callback) if callback else "" packet_parts = (str(code), packet_identifier, path, data) - packet = ':'.join(packet_parts) + packet = ":".join(packet_parts) try: self._connection.send(packet) - self.logger.debug(L(u'Sent packet: {0}', packet)) + self.logger.debug(L("Sent packet: {0}", packet)) except socket.error as error: raise ftrack_api.exception.EventHubConnectionError( - 'Failed to send packet: {0}'.format(error) + "Failed to send packet: {0}".format(error) ) def _receive_packet(self): - '''Receive and return packet via connection.''' + """Receive and return packet via connection.""" try: packet = self._connection.recv() except Exception as error: raise ftrack_api.exception.EventHubConnectionError( - 'Error receiving packet: {0}'.format(error) + "Error receiving packet: {0}".format(error) ) try: - parts = packet.split(':', 3) + parts = packet.split(":", 3) except AttributeError: raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) + "Received invalid packet {0}".format(packet) ) code, packet_identifier, path, data = None, None, None, None @@ -990,53 +966,51 @@ def _receive_packet(self): code = parts[0] else: raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) + "Received invalid packet {0}".format(packet) ) - self.logger.debug(L('Received packet: {0}', packet)) + self.logger.debug(L("Received packet: {0}", packet)) return code, packet_identifier, path, data def _handle_packet(self, code, packet_identifier, path, data): - '''Handle packet received from server.''' + """Handle packet received from server.""" code_name = self._code_name_mapping[code] - if code_name == 'connect': - self.logger.debug('Connected to event server.') - event = 
ftrack_api.event.base.Event('ftrack.meta.connected') + if code_name == "connect": + self.logger.debug("Connected to event server.") + event = ftrack_api.event.base.Event("ftrack.meta.connected") self._prepare_event(event) self._event_queue.put(event) - elif code_name == 'disconnect': - self.logger.debug('Disconnected from event server.') + elif code_name == "disconnect": + self.logger.debug("Disconnected from event server.") if not self._intentional_disconnect: - self.logger.debug( - 'Disconnected unexpectedly. Attempting to reconnect.' - ) + self.logger.debug("Disconnected unexpectedly. Attempting to reconnect.") try: self.reconnect( attempts=self._auto_reconnect_attempts, - delay=self._auto_reconnect_delay + delay=self._auto_reconnect_delay, ) except ftrack_api.exception.EventHubConnectionError: - self.logger.debug('Failed to reconnect automatically.') + self.logger.debug("Failed to reconnect automatically.") else: - self.logger.debug('Reconnected successfully.') + self.logger.debug("Reconnected successfully.") if not self.connected: - event = ftrack_api.event.base.Event('ftrack.meta.disconnected') + event = ftrack_api.event.base.Event("ftrack.meta.disconnected") self._prepare_event(event) self._event_queue.put(event) - elif code_name == 'heartbeat': + elif code_name == "heartbeat": # Reply with heartbeat. - self._send_packet(self._code_name_mapping['heartbeat']) + self._send_packet(self._code_name_mapping["heartbeat"]) - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) + elif code_name == "message": + self.logger.debug(L("Message received: {0}", data)) - elif code_name == 'event': + elif code_name == "event": payload = self._decode(data) - args = payload.get('args', []) + args = payload.get("args", []) if len(args) == 1: event_payload = args[0] @@ -1044,76 +1018,72 @@ def _handle_packet(self, code, packet_identifier, path, data): try: event = ftrack_api.event.base.Event(**event_payload) except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) + self.logger.exception( + L( + "Failed to convert payload into event: {0}", + event_payload, + ) + ) return self._event_queue.put(event) - elif code_name == 'acknowledge': - parts = data.split('+', 1) + elif code_name == "acknowledge": + parts = data.split("+", 1) acknowledged_packet_identifier = int(parts[0]) args = [] if len(parts) == 2: args = self._decode(parts[1]) try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) + callback = self._pop_packet_callback(acknowledged_packet_identifier) except KeyError: pass else: callback(*args) - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) + elif code_name == "error": + self.logger.error(L("Event server reported error: {0}.", data)) else: - self.logger.debug(L('{0}: {1}', code_name, data)) + self.logger.debug(L("{0}: {1}", code_name, data)) def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) + """Return *data* encoded as JSON formatted string.""" + return json.dumps(data, default=self._encode_object_hook, ensure_ascii=False) def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' + """Return *item* transformed for encoding.""" if isinstance(item, ftrack_api.event.base.Event): # Convert to dictionary for encoding. 
item = dict(**item) - if 'in_reply_to_event' in item: + if "in_reply_to_event" in item: # Convert keys to server convention. - item['inReplyToEvent'] = item.pop('in_reply_to_event') + item["inReplyToEvent"] = item.pop("in_reply_to_event") return item - raise TypeError('{0!r} is not JSON serializable'.format(item)) + raise TypeError("{0!r} is not JSON serializable".format(item)) def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' + """Return decoded JSON *string* as Python object.""" return json.loads(string, object_hook=self._decode_object_hook) def _decode_object_hook(self, item): - '''Return *item* transformed.''' + """Return *item* transformed.""" if isinstance(item, collections_abc.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') + if "inReplyToEvent" in item: + item["in_reply_to_event"] = item.pop("inReplyToEvent") return item class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' + """Context manager for a one-off subscription.""" def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' + """Initialise context.""" self._hub = hub self._subscription = subscription self._callback = callback @@ -1122,40 +1092,40 @@ def __init__(self, hub, subscription, callback, subscriber, priority): self._subscriberIdentifier = None def __enter__(self): - '''Enter context subscribing callback to topic.''' + """Enter context subscribing callback to topic.""" self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority + self._subscription, + self._callback, + subscriber=self._subscriber, + priority=self._priority, ) def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' + """Exit context unsubscribing callback from topic.""" self._hub.unsubscribe(self._subscriberIdentifier) class _ProcessorThread(threading.Thread): - '''Process messages from server.''' + """Process messages from server.""" daemon = True def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' + """Initialise thread with Socket.IO *client* instance.""" super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) self.client = client self.done = threading.Event() def run(self): - '''Perform work in thread.''' + """Perform work in thread.""" while not self.done.is_set(): try: code, packet_identifier, path, data = self.client._receive_packet() self.client._handle_packet(code, packet_identifier, path, data) except ftrack_api.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) + self.logger.debug(L("Ignoring invalid packet: {0}", error)) continue except ftrack_api.exception.EventHubConnectionError: @@ -1163,15 +1133,15 @@ def run(self): # Fake a disconnection event in order to trigger reconnection # when necessary. 
- self.client._handle_packet('0', '', '', '') + self.client._handle_packet("0", "", "", "") break except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) + self.logger.debug(L("Aborting processor thread: {0}", error)) self.cancel() break def cancel(self): - '''Cancel work as soon as possible.''' + """Cancel work as soon as possible.""" self.done.set() diff --git a/source/ftrack_api/event/subscriber.py b/source/ftrack_api/event/subscriber.py index 2afdaa78..2576ae20 100644 --- a/source/ftrack_api/event/subscriber.py +++ b/source/ftrack_api/event/subscriber.py @@ -6,23 +6,21 @@ class Subscriber(object): - '''Represent event subscriber.''' + """Represent event subscriber.""" def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api.event.subscription.Subscription( - subscription - ) + """Initialise subscriber.""" + self.subscription = ftrack_api.event.subscription.Subscription(subscription) self.callback = callback self.metadata = metadata self.priority = priority def __str__(self): - '''Return string representation.''' + """Return string representation.""" return '<{0} metadata={1} subscription="{2}">'.format( self.__class__.__name__, self.metadata, self.subscription ) def interested_in(self, event): - '''Return whether subscriber interested in *event*.''' + """Return whether the subscriber is interested in *event*.""" return self.subscription.includes(event) diff --git a/source/ftrack_api/event/subscription.py b/source/ftrack_api/event/subscription.py index f3a839f5..525b497b 100644 --- a/source/ftrack_api/event/subscription.py +++ b/source/ftrack_api/event/subscription.py @@ -6,19 +6,19 @@ class Subscription(object): - '''Represent a subscription.''' + """Represent a subscription.""" parser = ftrack_api.event.expression.Parser() def __init__(self, subscription): - '''Initialise with *subscription*.''' + """Initialise with *subscription*.""" self._subscription = subscription self._expression = self.parser.parse(subscription) def __str__(self): - '''Return string representation.''' + """Return string representation.""" return self._subscription def includes(self, event): - '''Return whether subscription includes *event*.''' + """Return whether subscription includes *event*.""" return self._expression.match(event) diff --git a/source/ftrack_api/exception.py b/source/ftrack_api/exception.py index 77bdf5b1..0e2437c7 100644 --- a/source/ftrack_api/exception.py +++ b/source/ftrack_api/exception.py @@ -9,19 +9,19 @@ class Error(Exception): - '''ftrack specific error.''' + """ftrack specific error.""" - default_message = 'Unspecified error occurred.' + default_message = "Unspecified error occurred." def __init__(self, message=None, details=None): - '''Initialise exception with *message*. + """Initialise exception with *message*. If *message* is None, the class 'default_message' will be used. *details* should be a mapping of extra information that can be used in the message and also to provide more context.
- ''' + """ if message is None: message = self.default_message @@ -33,7 +33,7 @@ def __init__(self, message=None, details=None): self.traceback = traceback.format_exc() def __str__(self): - '''Return string representation.''' + """Return string representation.""" keys = {} for key, value in self.details.items(): if isinstance(value, str): @@ -44,350 +44,333 @@ def __str__(self): class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' + """Raise when an authentication error occurs.""" - default_message = 'Authentication error.' + default_message = "Authentication error." class ServerError(Error): - '''Raise when the server reports an error.''' + """Raise when the server reports an error.""" - default_message = 'Server reported error processing request.' + default_message = "Server reported error processing request." class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' + """Raise when server appears incompatible.""" - default_message = 'Server incompatible.' + default_message = "Server incompatible." class NotFoundError(Error): - '''Raise when something that should exist is not found.''' + """Raise when something that should exist is not found.""" - default_message = 'Not found.' + default_message = "Not found." class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' + """Raise when unique value required and duplicate detected.""" - default_message = 'Non-unique value detected.' + default_message = "Non-unique value detected." class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' + """Raise when a result is incorrect.""" - default_message = 'Incorrect result detected.' + default_message = "Incorrect result detected." class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' + """Raise when a result was expected but no result was found.""" - default_message = 'Expected result, but no result was found.' + default_message = "Expected result, but no result was found." class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' + """Raise when a single result expected, but multiple results found.""" - default_message = 'Expected single result, but received multiple results.' + default_message = "Expected single result, but received multiple results." class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' + """Raise when an entity type error occurs.""" - default_message = 'Entity type error.' + default_message = "Entity type error." class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' + """Raise when an unrecognised entity type detected.""" default_message = 'Entity type "{entity_type}" not recognised.' def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) + """Initialise with *entity_type* that is unrecognised.""" + kw.setdefault("details", {}).update(dict(entity_type=entity_type)) super(UnrecognisedEntityTypeError, self).__init__(**kw) class OperationError(Error): - '''Raise when an operation error occurs.''' + """Raise when an operation error occurs.""" - default_message = 'Operation error.' + default_message = "Operation error." 
class InvalidStateError(Error): - '''Raise when an invalid state detected.''' + """Raise when an invalid state detected.""" - default_message = 'Invalid state.' + default_message = "Invalid state." class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' + """Raise when an invalid state transition detected.""" default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' + "Invalid transition from {current_state!r} to {target_state!r} state " + "for entity {entity!r}" ) def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) + """Initialise error.""" + kw.setdefault("details", {}).update( + dict(current_state=current_state, target_state=target_state, entity=entity) + ) super(InvalidStateTransitionError, self).__init__(**kw) class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' + """Raise when an error related to an attribute occurs.""" - default_message = 'Attribute error.' + default_message = "Attribute error." class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' + """Raise when modification of immutable attribute attempted.""" - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' - ) + default_message = "Cannot modify value of immutable {attribute.name!r} attribute." def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) + """Initialise error.""" + kw.setdefault("details", {}).update(dict(attribute=attribute)) super(ImmutableAttributeError, self).__init__(**kw) class CollectionError(Error): - '''Raise when an error related to collections occurs.''' + """Raise when an error related to collections occurs.""" - default_message = 'Collection error.' + default_message = "Collection error." def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) + """Initialise error.""" + kw.setdefault("details", {}).update(dict(collection=collection)) super(CollectionError, self).__init__(**kw) class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' + """Raise when modification of immutable collection attempted.""" - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) + default_message = "Cannot modify value of immutable collection {collection!r}." class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' + """Raise when duplicate item in collection detected.""" - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) + default_message = "Item {item!r} already exists in collection {collection!r}." def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) + """Initialise error.""" + kw.setdefault("details", {}).update(dict(item=item)) super(DuplicateItemInCollectionError, self).__init__(collection, **kw) class ParseError(Error): - '''Raise when a parsing error occurs.''' + """Raise when a parsing error occurs.""" - default_message = 'Failed to parse.' + default_message = "Failed to parse." 
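Of the classes defined next, the event hub errors are the ones integration code most commonly handles, typically around publishing before the hub is connected. A short defensive-usage sketch, assuming credentials are available in the environment; the topic name is made up::

    import ftrack_api
    import ftrack_api.event.base
    import ftrack_api.exception

    session = ftrack_api.Session()  # Event hub is not connected by default.
    event = ftrack_api.event.base.Event(
        topic="my.custom.topic", data={"key": "value"}
    )

    try:
        session.event_hub.publish(event)
    except ftrack_api.exception.EventHubConnectionError:
        # Connect first with session.event_hub.connect(), or construct the
        # session with auto_connect_event_hub=True.
        print("Event hub not connected; event was not published.")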
class EventHubError(Error): - '''Raise when issues related to event hub occur.''' + """Raise when issues related to event hub occur.""" - default_message = 'Event hub error occurred.' + default_message = "Event hub error occurred." class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' + """Raise when event hub encounters connection problem.""" - default_message = 'Event hub is not connected.' + default_message = "Event hub is not connected." class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' + """Raise when event hub encounters an issue with a packet.""" - default_message = 'Invalid packet.' + default_message = "Invalid packet." class PermissionDeniedError(Error): - '''Raise when permission is denied.''' + """Raise when permission is denied.""" - default_message = 'Permission denied.' + default_message = "Permission denied." class LocationError(Error): - '''Base for errors associated with locations.''' + """Base for errors associated with locations.""" - default_message = 'Unspecified location error' + default_message = "Unspecified location error" class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' + """Raise when component not available in any location.""" - default_message = 'Component not available in any location.' + default_message = "Component not available in any location." class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' + """Raise when component(s) not in location.""" default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' + "Component(s) {formatted_components} not found in location {location}." ) def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' + """Initialise with *components* and *location*.""" if isinstance(components, ftrack_api.entity.base.Entity): components = [components] - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) + kw.setdefault("details", {}).update( + dict( + components=components, + formatted_components=", ".join( + [str(component) for component in components] + ), + location=location, + ) + ) super(ComponentNotInLocationError, self).__init__(**kw) class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' + """Raise when component(s) already exist in location.""" default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' + "Component(s) {formatted_components} already exist in location {location}."
) def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' + """Initialise with *components* and *location*.""" if isinstance(components, ftrack_api.entity.base.Entity): components = [components] - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) + kw.setdefault("details", {}).update( + dict( + components=components, + formatted_components=", ".join( + [str(component) for component in components] + ), + location=location, + ) + ) super(ComponentInLocationError, self).__init__(**kw) class AccessorError(Error): - '''Base for errors associated with accessors.''' + """Base for errors associated with accessors.""" - default_message = 'Unspecified accessor error' + default_message = "Unspecified accessor error" class AccessorOperationFailedError(AccessorError): - '''Base for failed operations on accessors.''' - - default_message = 'Operation {operation} failed: {error}' - - def __init__( - self, operation='', resource_identifier=None, error=None, **kw - ): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier, - error=error - )) + """Base for failed operations on accessors.""" + + default_message = "Operation {operation} failed: {error}" + + def __init__(self, operation="", resource_identifier=None, error=None, **kw): + kw.setdefault("details", {}).update( + dict( + operation=operation, + resource_identifier=resource_identifier, + error=error, + ) + ) super(AccessorOperationFailedError, self).__init__(**kw) class AccessorUnsupportedOperationError(AccessorOperationFailedError): - '''Raise when operation is unsupported.''' + """Raise when operation is unsupported.""" - default_message = 'Operation {operation} unsupported.' + default_message = "Operation {operation} unsupported." class AccessorPermissionDeniedError(AccessorOperationFailedError): - '''Raise when permission denied.''' + """Raise when permission denied.""" - default_message = ( - 'Cannot {operation} {resource_identifier}. Permission denied.' - ) + default_message = "Cannot {operation} {resource_identifier}. Permission denied." class AccessorResourceIdentifierError(AccessorError): - '''Raise when a error related to a resource_identifier occurs.''' + """Raise when an error related to a resource_identifier occurs.""" - default_message = 'Resource identifier is invalid: {resource_identifier}.' + default_message = "Resource identifier is invalid: {resource_identifier}." def __init__(self, resource_identifier, **kw): - kw.setdefault('details', {}).update(dict( - resource_identifier=resource_identifier - )) + kw.setdefault("details", {}).update( + dict(resource_identifier=resource_identifier) + ) super(AccessorResourceIdentifierError, self).__init__(**kw) class AccessorFilesystemPathError(AccessorResourceIdentifierError): - '''Raise when a error related to an accessor filesystem path occurs.''' + """Raise when an error related to an accessor filesystem path occurs.""" default_message = ( - 'Could not determine filesystem path from resource identifier: ' - '{resource_identifier}.' + "Could not determine filesystem path from resource identifier: " + "{resource_identifier}."
) class AccessorResourceError(AccessorError): - '''Base for errors associated with specific resource.''' + """Base for errors associated with specific resource.""" - default_message = 'Unspecified resource error: {resource_identifier}' + default_message = "Unspecified resource error: {resource_identifier}" - def __init__(self, operation='', resource_identifier=None, error=None, - **kw): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier - )) + def __init__(self, operation="", resource_identifier=None, error=None, **kw): + kw.setdefault("details", {}).update( + dict(operation=operation, resource_identifier=resource_identifier) + ) super(AccessorResourceError, self).__init__(**kw) class AccessorResourceNotFoundError(AccessorResourceError): - '''Raise when a required resource is not found.''' + """Raise when a required resource is not found.""" - default_message = 'Resource not found: {resource_identifier}' + default_message = "Resource not found: {resource_identifier}" class AccessorParentResourceNotFoundError(AccessorResourceError): - '''Raise when a parent resource (such as directory) is not found.''' + """Raise when a parent resource (such as directory) is not found.""" - default_message = 'Parent resource is missing: {resource_identifier}' + default_message = "Parent resource is missing: {resource_identifier}" class AccessorResourceInvalidError(AccessorResourceError): - '''Raise when a resource is not the right type.''' + """Raise when a resource is not the right type.""" - default_message = 'Resource invalid: {resource_identifier}' + default_message = "Resource invalid: {resource_identifier}" class AccessorContainerNotEmptyError(AccessorResourceError): - '''Raise when container is not empty.''' + """Raise when container is not empty.""" - default_message = 'Container is not empty: {resource_identifier}' + default_message = "Container is not empty: {resource_identifier}" class StructureError(Error): - '''Base for errors associated with structures.''' + """Base for errors associated with structures.""" - default_message = 'Unspecified structure error' + default_message = "Unspecified structure error" class ConnectionClosedError(Error): - '''Raise when attempt to use closed connection detected.''' + """Raise when attempt to use closed connection detected.""" default_message = "Connection closed." diff --git a/source/ftrack_api/formatter.py b/source/ftrack_api/formatter.py index 0c55ce89..85a36a6b 100644 --- a/source/ftrack_api/formatter.py +++ b/source/ftrack_api/formatter.py @@ -13,14 +13,12 @@ #: Useful filters to pass to :func:`format`. FILTER = { - 'ignore_unset': ( - lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET - ) + "ignore_unset": (lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET) } def _can_do_colors(): - '''check if we are ( likely ) to be able to handle colors.''' + """Check whether we are (likely) able to handle colors.""" if "ANSI_COLORS_DISABLED" in os.environ: return False if "NO_COLOR" in os.environ: return False @@ -35,10 +33,15 @@ def format( - entity, formatters=None, attribute_filter=None, recursive=False, - indent=0, indent_first_line=True, _seen=None + entity, + formatters=None, + attribute_filter=None, + recursive=False, + indent=0, + indent_first_line=True, + _seen=None, ): - '''Return formatted string representing *entity*. + """Return formatted string representing *entity*. *formatters* can be used to customise formatting of elements.
It should be a mapping with one or more of the following keys: @@ -66,26 +69,30 @@ def format( Iterates over all *entity* attributes which may cause multiple queries to the server. Turn off auto populating in the session to prevent this. - ''' + """ # Initialise default formatters. if formatters is None: formatters = dict() formatters.setdefault( - 'header', - lambda text: '\x1b[1m\x1b[44m\x1b[97m{}\x1b[0m\033[0m'.format(text) if _can_do_colors() else text + "header", + lambda text: "\x1b[1m\x1b[44m\x1b[97m{}\x1b[0m\033[0m".format(text) + if _can_do_colors() + else text, ) formatters.setdefault( - 'label', - lambda text: '\x1b[1m\x1b[34m{}\x1b[0m\033[0m'.format(text) if _can_do_colors() else text + "label", + lambda text: "\x1b[1m\x1b[34m{}\x1b[0m\033[0m".format(text) + if _can_do_colors() + else text, ) # Determine indents. - spacer = ' ' * indent + spacer = " " * indent if indent_first_line: first_line_spacer = spacer else: - first_line_spacer = '' + first_line_spacer = "" # Avoid infinite recursion on circular references. if _seen is None: @@ -93,17 +100,12 @@ def format( identifier = str(ftrack_api.inspection.identity(entity)) if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) + return first_line_spacer + formatters["header"](entity.entity_type) + "{...}" _seen.add(identifier) information = list() - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) + information.append(first_line_spacer + formatters["header"](entity.entity_type)) for key, value in sorted(entity.items()): if attribute_filter is not None: if not attribute_filter(entity, key, value): @@ -119,7 +121,7 @@ def format( recursive=recursive, indent=child_indent, indent_first_line=False, - _seen=_seen.copy() + _seen=_seen.copy(), ) if isinstance(value, ftrack_api.collection.Collection): @@ -133,14 +135,12 @@ def format( recursive=recursive, indent=child_indent, indent_first_line=index != 0, - _seen=_seen.copy() + _seen=_seen.copy(), ) child_values.append(child_value) - value = '\n'.join(child_values) + value = "\n".join(child_values) - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) + information.append(spacer + " {0}: {1}".format(formatters["label"](key), value)) - return '\n'.join(information) + return "\n".join(information) diff --git a/source/ftrack_api/inspection.py b/source/ftrack_api/inspection.py index cde648d2..a9cd8d1e 100644 --- a/source/ftrack_api/inspection.py +++ b/source/ftrack_api/inspection.py @@ -10,28 +10,25 @@ def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - list(primary_key(entity).values()) - ) + """Return unique identity of *entity*.""" + return (str(entity.entity_type), list(primary_key(entity).values())) def primary_key(entity): - '''Return primary key of *entity* as an ordered mapping of {field: value}. + """Return primary key of *entity* as an ordered mapping of {field: value}. To get just the primary key values:: primary_key(entity).values() - ''' + """ primary_key = collections.OrderedDict() for name in entity.primary_key_attributes: value = entity[name] if value is ftrack_api.symbol.NOT_SET: raise KeyError( 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) + "entity {1!r}.".format(name, entity) ) # todo: Compatiblity fix, review for better implementation. 
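A quick usage sketch of the two inspection helpers above; it assumes a reachable server, credentials in the environment and at least one Task on the server::

    import ftrack_api
    import ftrack_api.inspection

    session = ftrack_api.Session()
    task = session.query("Task").first()

    if task is not None:
        # Ordered mapping of primary key attribute to value, e.g.
        # OrderedDict([('id', '...')]).
        print(ftrack_api.inspection.primary_key(task))

        # Tuple of entity type and primary key values, e.g. ('Task', ['...']).
        print(ftrack_api.inspection.identity(task))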
@@ -41,37 +38,31 @@ def primary_key(entity): def _state(operation, state): - '''Return state following *operation* against current *state*.''' + """Return state following *operation* against current *state*.""" if ( - isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ) + isinstance(operation, ftrack_api.operation.CreateEntityOperation) and state is ftrack_api.symbol.NOT_SET ): state = ftrack_api.symbol.CREATED elif ( - isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ) + isinstance(operation, ftrack_api.operation.UpdateEntityOperation) and state is ftrack_api.symbol.NOT_SET ): state = ftrack_api.symbol.MODIFIED - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): + elif isinstance(operation, ftrack_api.operation.DeleteEntityOperation): state = ftrack_api.symbol.DELETED return state def state(entity): - '''Return current *entity* state. + """Return current *entity* state. .. seealso:: :func:`ftrack_api.inspection.states`. - ''' + """ value = ftrack_api.symbol.NOT_SET for operation in entity.session.recorded_operations: @@ -83,8 +74,8 @@ def state(entity): ( ftrack_api.operation.CreateEntityOperation, ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) + ftrack_api.operation.DeleteEntityOperation, + ), ) and operation.entity_type == entity.entity_type and operation.entity_key == primary_key(entity) @@ -95,7 +86,7 @@ def state(entity): def states(entities): - '''Return current states of *entities*. + """Return current states of *entities*. An optimised function for determining states of multiple entities in one go. @@ -106,7 +97,7 @@ def states(entities): .. seealso:: :func:`ftrack_api.inspection.state`. - ''' + """ if not entities: return [] @@ -118,15 +109,13 @@ def states(entities): entities_by_identity[key] = ftrack_api.symbol.NOT_SET for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) + if isinstance( + operation, + ( + ftrack_api.operation.CreateEntityOperation, + ftrack_api.operation.UpdateEntityOperation, + ftrack_api.operation.DeleteEntityOperation, + ), ): key = (operation.entity_type, str(list(operation.entity_key.values()))) if key not in entities_by_identity: diff --git a/source/ftrack_api/logging.py b/source/ftrack_api/logging.py index b2870496..895d72fd 100644 --- a/source/ftrack_api/logging.py +++ b/source/ftrack_api/logging.py @@ -10,17 +10,16 @@ def deprecation_warning(message): def decorator(function): @functools.wraps(function) def wrapper(*args, **kwargs): - warnings.warn( - message, - PendingDeprecationWarning - ) + warnings.warn(message, PendingDeprecationWarning) return function(*args, **kwargs) + return wrapper return decorator + class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. + """A log message that can be evaluated lazily for improved performance. 
Example:: @@ -29,15 +28,14 @@ class LazyLogMessage(object): 'Hello {0}', 'world' )) - ''' + """ def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' + """Initialise with *message* format string and arguments.""" self.message = message self.args = args self.kwargs = kwargs def __str__(self): - '''Return string representation.''' + """Return string representation.""" return self.message.format(*self.args, **self.kwargs) - diff --git a/source/ftrack_api/operation.py b/source/ftrack_api/operation.py index 521712c0..5f817d68 100644 --- a/source/ftrack_api/operation.py +++ b/source/ftrack_api/operation.py @@ -6,43 +6,43 @@ class Operations(object): - '''Stack of operations.''' + """Stack of operations.""" def __init__(self): - '''Initialise stack.''' + """Initialise stack.""" self._stack = [] super(Operations, self).__init__() def clear(self): - '''Clear all operations.''' + """Clear all operations.""" del self._stack[:] def push(self, operation): - '''Push *operation* onto stack.''' + """Push *operation* onto stack.""" self._stack.append(operation) def pop(self): - '''Pop and return most recent operation from stack.''' + """Pop and return most recent operation from stack.""" return self._stack.pop() def __len__(self): - '''Return count of operations.''' + """Return count of operations.""" return len(self._stack) def __iter__(self): - '''Return iterator over operations.''' + """Return iterator over operations.""" return iter(self._stack) class Operation(object): - '''Represent an operation.''' + """Represent an operation.""" class CreateEntityOperation(Operation): - '''Represent create entity operation.''' + """Represent create entity operation.""" def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. + """Initialise operation. *entity_type* should be the type of entity in string form (as returned from :attr:`ftrack_api.entity.base.Entity.entity_type`). @@ -57,7 +57,7 @@ def __init__(self, entity_type, entity_key, entity_data): Shallow copies will be made of each value in *entity_data*. - ''' + """ super(CreateEntityOperation, self).__init__() self.entity_type = entity_type self.entity_key = entity_key @@ -67,12 +67,10 @@ def __init__(self, entity_type, entity_key, entity_data): class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' + """Represent update entity operation.""" - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. + def __init__(self, entity_type, entity_key, attribute_name, old_value, new_value): + """Initialise operation. *entity_type* should be the type of entity in string form (as returned from :attr:`ftrack_api.entity.base.Entity.entity_type`). @@ -88,7 +86,7 @@ def __init__( Shallow copies will be made of both *old_value* and *new_value*. - ''' + """ super(UpdateEntityOperation, self).__init__() self.entity_type = entity_type self.entity_key = entity_key @@ -98,10 +96,10 @@ def __init__( class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' + """Represent delete entity operation.""" def __init__(self, entity_type, entity_key): - '''Initialise operation. + """Initialise operation. *entity_type* should be the type of entity in string form (as returned from :attr:`ftrack_api.entity.base.Entity.entity_type`). 
@@ -109,8 +107,7 @@ def __init__(self, entity_type, entity_key): *entity_key* should be the unique key for the entity and should follow the form returned from :func:`ftrack_api.inspection.primary_key`. - ''' + """ super(DeleteEntityOperation, self).__init__() self.entity_type = entity_type self.entity_key = entity_key - diff --git a/source/ftrack_api/plugin.py b/source/ftrack_api/plugin.py index 918c28ef..2a28af7b 100644 --- a/source/ftrack_api/plugin.py +++ b/source/ftrack_api/plugin.py @@ -21,13 +21,20 @@ import inspect FullArgSpec = collections.namedtuple( - 'FullArgSpec', [ - 'args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults', 'annotations' - ] + "FullArgSpec", + [ + "args", + "varargs", + "varkw", + "defaults", + "kwonlyargs", + "kwonlydefaults", + "annotations", + ], ) def getfullargspec(func): - '''a python 2 version of `getfullargspec`.''' + """a python 2 version of `getfullargspec`.""" spec = inspect.getargspec(func) return FullArgSpec( @@ -37,9 +44,10 @@ def getfullargspec(func): defaults=spec.defaults, kwonlyargs=[], kwonlydefaults=None, - annotations={} + annotations={}, ) + try: from imp import load_source @@ -62,7 +70,7 @@ def load_source(modname, filename): def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. + """Find and load plugins in search *paths*. Each discovered module should implement a register function that accepts *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs @@ -71,8 +79,8 @@ def discover(paths, positional_arguments=None, keyword_arguments=None): If a register function does not accept variable arguments, then attempt to only pass accepted arguments to the function by inspecting its signature. - ''' - logger = logging.getLogger(__name__ + '.discover') + """ + logger = logging.getLogger(__name__ + ".discover") if positional_arguments is None: positional_arguments = [] @@ -89,7 +97,7 @@ def discover(paths, positional_arguments=None, keyword_arguments=None): for base, directories, filenames in os.walk(path): for filename in filenames: name, extension = os.path.splitext(filename) - if extension != '.py': + if extension != ".py": continue module_path = os.path.join(base, filename) @@ -99,20 +107,21 @@ def discover(paths, positional_arguments=None, keyword_arguments=None): module = load_source(unique_name, module_path) except Exception as error: logger.warning( - 'Failed to load plugin from "{0}": {1}' - .format(module_path, error) + 'Failed to load plugin from "{0}": {1}'.format( + module_path, error + ) ) - logger.debug( - traceback.format_exc()) + logger.debug(traceback.format_exc()) continue try: module.register except AttributeError: logger.warning( - 'Failed to load plugin that did not define a ' - '"register" function at the module level: {0}' - .format(module_path) + "Failed to load plugin that did not define a " + '"register" function at the module level: {0}'.format( + module_path + ) ) else: # Attempt to only pass arguments that are accepted by the @@ -122,45 +131,44 @@ def discover(paths, positional_arguments=None, keyword_arguments=None): selected_positional_arguments = positional_arguments selected_keyword_arguments = keyword_arguments - if ( - not specification.varargs and - len(positional_arguments) > len(specification.args) + if not specification.varargs and len(positional_arguments) > len( + specification.args ): logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' 
+ "Culling passed arguments to match register " + "function signature." ) selected_positional_arguments = positional_arguments[ - len(specification.args): + len(specification.args) : ] selected_keyword_arguments = {} elif not specification.varkw: # Remove arguments that have been passed as positionals. - remainder = specification.args[ - len(positional_arguments): - ] + remainder = specification.args[len(positional_arguments) :] # Determine remaining available keyword arguments. defined_keyword_arguments = [] if specification.defaults: defined_keyword_arguments = specification.args[ - -len(specification.defaults): + -len(specification.defaults) : ] - remaining_keyword_arguments = set([ - keyword_argument for keyword_argument - in defined_keyword_arguments - if keyword_argument in remainder - ]) + remaining_keyword_arguments = set( + [ + keyword_argument + for keyword_argument in defined_keyword_arguments + if keyword_argument in remainder + ] + ) if not set(keyword_arguments.keys()).issubset( remaining_keyword_arguments ): logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' + "Culling passed arguments to match register " + "function signature." ) selected_keyword_arguments = { key: value @@ -169,6 +177,5 @@ def discover(paths, positional_arguments=None, keyword_arguments=None): } module.register( - *selected_positional_arguments, - **selected_keyword_arguments + *selected_positional_arguments, **selected_keyword_arguments ) diff --git a/source/ftrack_api/query.py b/source/ftrack_api/query.py index 30449aeb..62c7dd5b 100644 --- a/source/ftrack_api/query.py +++ b/source/ftrack_api/query.py @@ -8,13 +8,13 @@ class QueryResult(collections_abc.Sequence): - '''Results from a query.''' + """Results from a query.""" - OFFSET_EXPRESSION = re.compile('(?Poffset (?P\d+))') - LIMIT_EXPRESSION = re.compile('(?Plimit (?P\d+))') + OFFSET_EXPRESSION = re.compile("(?Poffset (?P\d+))") + LIMIT_EXPRESSION = re.compile("(?Plimit (?P\d+))") def __init__(self, session, expression, page_size=500): - '''Initialise result set. + """Initialise result set. *session* should be an instance of :class:`ftrack_api.session.Session` that will be used for executing the query *expression*. @@ -33,16 +33,14 @@ def __init__(self, session, expression, page_size=500): Setting *page_size* to a very large amount may negatively impact performance of not only the caller, but the server in general. - ''' + """ super(QueryResult, self).__init__() self._session = session self._results = [] - ( - self._expression, - self._offset, - self._limit - ) = self._extract_offset_and_limit(expression) + (self._expression, self._offset, self._limit) = self._extract_offset_and_limit( + expression + ) self._page_size = page_size if self._limit is not None and self._limit < self._page_size: @@ -56,55 +54,53 @@ def __init__(self, session, expression, page_size=500): self._next_offset = 0 def _extract_offset_and_limit(self, expression): - '''Process *expression* extracting offset and limit. + """Process *expression* extracting offset and limit. Return (expression, offset, limit). 
- ''' + """ offset = None match = self.OFFSET_EXPRESSION.search(expression) if match: - offset = int(match.group('value')) + offset = int(match.group("value")) expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] + expression[: match.start("offset")] + expression[match.end("offset") :] ) limit = None match = self.LIMIT_EXPRESSION.search(expression) if match: - limit = int(match.group('value')) + limit = int(match.group("value")) expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] + expression[: match.start("limit")] + expression[match.end("limit") :] ) return expression.strip(), offset, limit def __getitem__(self, index): - '''Return value at *index*.''' + """Return value at *index*.""" while self._can_fetch_more() and index >= len(self._results): self._fetch_more() return self._results[index] def __len__(self): - '''Return number of items.''' + """Return number of items.""" while self._can_fetch_more(): self._fetch_more() return len(self._results) def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' + """Return whether more results are available to fetch.""" return self._next_offset is not None def _fetch_more(self): - '''Fetch next page of results if available.''' + """Fetch next page of results if available.""" if not self._can_fetch_more(): return - expression = '{0} offset {1} limit {2}'.format( + expression = "{0} offset {1} limit {2}".format( self._expression, self._next_offset, self._page_size ) records, metadata = self._session._query(expression) @@ -113,17 +109,17 @@ def _fetch_more(self): if self._limit is not None and (len(self._results) >= self._limit): # Original limit reached. self._next_offset = None - del self._results[self._limit:] + del self._results[self._limit :] else: # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) + self._next_offset = metadata.get("next", {}).get("offset", None) def all(self): - '''Fetch and return all data.''' + """Fetch and return all data.""" return list(self) def one(self): - '''Return exactly one single result from query by applying a limit. + """Return exactly one result from the query by applying a limit. Raise :exc:`ValueError` if an existing limit is already present in the expression. @@ -142,24 +138,22 @@ def one(self): :exc:`~ftrack_api.exception.IncorrectResultError` if you want to catch only one error type. - ''' + """ expression = self._expression if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) + raise ValueError("Expression already contains a limit clause.") if self._offset is not None: raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' + "Expression contains an offset clause which does not make " + "sense when selecting a single item." ) # Apply custom limit as optimisation. A limit of 2 is used rather than # 1 so that it is possible to test for multiple matching entries # case. - expression += ' limit 2' + expression += " limit 2" results, metadata = self._session._query(expression) @@ -172,27 +166,25 @@ def one(self): return results[0] def first(self): - '''Return first matching result from query by applying a limit. + """Return first matching result from query by applying a limit. Raise :exc:`ValueError` if an existing limit is already present in the expression. If no matching result available return None.
- ''' + """ expression = self._expression if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) + raise ValueError("Expression already contains a limit clause.") # Apply custom offset if present. if self._offset is not None: - expression += ' offset {0}'.format(self._offset) + expression += " offset {0}".format(self._offset) # Apply custom limit as optimisation. - expression += ' limit 1' + expression += " limit 1" results, metadata = self._session._query(expression) diff --git a/source/ftrack_api/resource_identifier_transformer/base.py b/source/ftrack_api/resource_identifier_transformer/base.py index b4cbbc3a..445b2467 100644 --- a/source/ftrack_api/resource_identifier_transformer/base.py +++ b/source/ftrack_api/resource_identifier_transformer/base.py @@ -3,8 +3,10 @@ from builtins import object + + class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. + """Transform resource identifiers. Provide ability to modify resource identifier before it is stored centrally (:meth:`encode`), or after it has been retrieved, but before it is used @@ -20,32 +22,32 @@ class ResourceIdentifierTransformer(object): :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted towards common transformations. - ''' + """ def __init__(self, session): - '''Initialise resource identifier transformer. + """Initialise resource identifier transformer. *session* should be the :class:`ftrack_api.session.Session` instance to use for communication with the server. - ''' + """ self.session = session super(ResourceIdentifierTransformer, self).__init__() def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. + """Return encoded *resource_identifier* for storing centrally. A mapping of *context* values may be supplied to guide the transformation. - ''' + """ return resource_identifier def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. + """Return decoded *resource_identifier* for use locally. A mapping of *context* values may be supplied to guide the transformation. 
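
# --- Example: a minimal sketch of a custom transformer subclass ---------------
# 'PrefixTransformer' and the '/mnt/projects/' prefix are hypothetical; only
# the encode/decode overrides shown in the base class above are used.
from ftrack_api.resource_identifier_transformer.base import (
    ResourceIdentifierTransformer,
)


class PrefixTransformer(ResourceIdentifierTransformer):
    def encode(self, resource_identifier, context=None):
        # Strip a site local prefix before the identifier is stored centrally.
        return resource_identifier.replace("/mnt/projects/", "", 1)

    def decode(self, resource_identifier, context=None):
        # Re-apply the site local prefix before the identifier is used locally.
        return "/mnt/projects/" + resource_identifier
# ------------------------------------------------------------------------------
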
- ''' + """ return resource_identifier diff --git a/source/ftrack_api/session.py b/source/ftrack_api/session.py index 28e3ae20..d8a4f69e 100644 --- a/source/ftrack_api/session.py +++ b/source/ftrack_api/session.py @@ -62,33 +62,43 @@ class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' + """Attach ftrack session authentication information to requests.""" def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' + """Initialise with *api_key* and *api_user*.""" self.api_key = api_key self.api_user = api_user super(SessionAuthentication, self).__init__() def __call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) + """Modify *request* to have appropriate headers.""" + request.headers.update( + {"ftrack-api-key": self.api_key, "ftrack-user": self.api_user} + ) return request class Session(object): - '''An isolated session for interaction with an ftrack server.''' + """An isolated session for interaction with an ftrack server.""" def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=False, schema_cache_path=None, - plugin_arguments=None, timeout=60, cookies=None, headers=None, strict_api=False + self, + server_url=None, + api_key=None, + api_user=None, + auto_populate=True, + plugin_paths=None, + cache=None, + cache_key_maker=None, + auto_connect_event_hub=False, + schema_cache_path=None, + plugin_arguments=None, + timeout=60, + cookies=None, + headers=None, + strict_api=False, ): - '''Initialise session. + """Initialise session. *server_url* should be the URL of the ftrack server to connect to including any port number. If not specified attempt to look up from @@ -170,41 +180,39 @@ def __init__( specified) indicating whether to add the 'ftrack-strict-api': 'true' header to the request or not. - ''' + """ super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) + self.logger = logging.getLogger(__name__ + "." + self.__class__.__name__) self._closed = False if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') + server_url = os.environ.get("FTRACK_SERVER") if not server_url: raise TypeError( 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' + "in environment variable FTRACK_SERVER." ) self._server_url = server_url.rstrip("/") if api_key is None: api_key = os.environ.get( - 'FTRACK_API_KEY', + "FTRACK_API_KEY", # Backwards compatibility - os.environ.get('FTRACK_APIKEY') + os.environ.get("FTRACK_APIKEY"), ) if not api_key: raise TypeError( 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' + "environment variable FTRACK_API_KEY." ) self._api_key = api_key if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') + api_user = os.environ.get("FTRACK_API_USER") if not api_user: try: api_user = getpass.getuser() @@ -214,17 +222,15 @@ def __init__( if not api_user: raise TypeError( 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' 
+ "environment variable FTRACK_API_USER or one of the standard " + "environment variables used by Python's getpass module." ) self._api_user = api_user # Currently pending operations. self.recorded_operations = ftrack_api.operation.Operations() - self._record_operations = collections.defaultdict( - lambda: True - ) + self._record_operations = collections.defaultdict(lambda: True) self.cache_key_maker = cache_key_maker if self.cache_key_maker is None: @@ -232,9 +238,7 @@ def __init__( # Enforce always having a memory cache at top level so that the same # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) + self.cache = ftrack_api.cache.LayeredCache([ftrack_api.cache.MemoryCache()]) if cache is not None: if callable(cache): @@ -249,38 +253,32 @@ def __init__( self._managed_request = None self._request = requests.Session() - + if cookies: if not isinstance(cookies, collections_abc.Mapping): - raise TypeError('The cookies argument is required to be a mapping.') + raise TypeError("The cookies argument is required to be a mapping.") self._request.cookies.update(cookies) if headers: if not isinstance(headers, collections_abc.Mapping): - raise TypeError('The headers argument is required to be a mapping.') + raise TypeError("The headers argument is required to be a mapping.") headers = dict(headers) else: headers = {} - + if not isinstance(strict_api, bool): - raise TypeError('The strict_api argument is required to be a boolean.') + raise TypeError("The strict_api argument is required to be a boolean.") - headers.update( - {'ftrack-strict-api': 'true' if strict_api is True else 'false'} - ) + headers.update({"ftrack-strict-api": "true" if strict_api is True else "false"}) self._request.headers.update(headers) - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) + self._request.auth = SessionAuthentication(self._api_key, self._api_user) self.request_timeout = timeout # Auto populating state is now thread-local - self._auto_populate = collections.defaultdict( - lambda: auto_populate - ) + self._auto_populate = collections.defaultdict(lambda: auto_populate) # Fetch server information and in doing so also check credentials. 
self._server_information = self._fetch_server_information() @@ -294,7 +292,7 @@ def __init__( self._api_user, self._api_key, headers=headers, - cookies=requests.utils.dict_from_cookiejar(self._request.cookies) + cookies=requests.utils.dict_from_cookiejar(self._request.cookies), ) self._auto_connect_event_hub_thread = None @@ -317,9 +315,9 @@ def __init__( self._plugin_paths = plugin_paths if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) + self._plugin_paths = os.environ.get("FTRACK_EVENT_PLUGIN_PATH", "").split( + os.pathsep + ) self._discover_plugins(plugin_arguments=plugin_arguments) @@ -329,11 +327,11 @@ def __init__( if schema_cache_path is None: schema_cache_path = platformdirs.user_cache_dir() schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path + "FTRACK_API_SCHEMA_CACHE_PATH", schema_cache_path ) schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' + schema_cache_path, "ftrack_api_schema_cache.json" ) self.schemas = self._load_schemas(schema_cache_path) @@ -344,30 +342,27 @@ def __init__( self._configure_locations() self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) + topic="ftrack.api.session.ready", data=dict(session=self) ), - synchronous=True + synchronous=True, ) def __enter__(self): - '''Return session as context manager.''' + """Return session as context manager.""" return self def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' + """Exit session context, closing session in process.""" self.close() @property def _request(self): - '''Return request session. + """Return request session. Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has been closed and connection unavailable. 
-        '''
+        """
         if self._managed_request is None:
             raise ftrack_api.exception.ConnectionClosedError()

@@ -375,107 +370,104 @@ def _request(self):

     @_request.setter
     def _request(self, value):
-        '''Set request session to *value*.'''
+        """Set request session to *value*."""
         self._managed_request = value

     @property
     def auto_populate(self):
-        '''The current state of auto populate, stored per thread.'''
+        """The current state of auto populate, stored per thread."""
         return self._auto_populate[threading.current_thread().ident]

     @auto_populate.setter
     def auto_populate(self, value):
-        '''Setter for auto_populate, stored per thread.'''
+        """Setter for auto_populate, stored per thread."""
         self._auto_populate[threading.current_thread().ident] = value

     @property
     def record_operations(self):
-        '''The current state of record operations, stored per thread.'''
+        """The current state of record operations, stored per thread."""
         return self._record_operations[threading.current_thread().ident]

     @record_operations.setter
     def record_operations(self, value):
-        '''Setter for record operations, stored per thread.'''
+        """Setter for record operations, stored per thread."""
         self._record_operations[threading.current_thread().ident] = value

-
     @property
     def closed(self):
-        '''Return whether session has been closed.'''
+        """Return whether session has been closed."""
         return self._closed

     @property
     def server_information(self):
-        '''Return server information such as server version.'''
+        """Return server information such as server version."""
         return self._server_information.copy()

     @property
     def server_url(self):
-        '''Return server ulr used for session.'''
+        """Return server URL used for session."""
         return self._server_url

     @property
     def api_user(self):
-        '''Return username used for session.'''
+        """Return username used for session."""
         return self._api_user

     @property
     def api_key(self):
-        '''Return API key used for session.'''
+        """Return API key used for session."""
         return self._api_key

     @property
     def event_hub(self):
-        '''Return event hub.'''
+        """Return event hub."""
         return self._event_hub

     @property
     def _local_cache(self):
-        '''Return top level memory cache.'''
+        """Return top level memory cache."""
         return self.cache.caches[0]

     def check_server_compatibility(self):
-        '''Check compatibility with connected server.'''
-        server_version = self.server_information.get('version')
+        """Check compatibility with connected server."""
+        server_version = self.server_information.get("version")
         if server_version is None:
             raise ftrack_api.exception.ServerCompatibilityError(
-                'Could not determine server version.'
+                "Could not determine server version."
             )

         # Perform basic version check.
-        if server_version != 'dev':
-            min_server_version = '3.3.11'
-            if (
-                distutils.version.LooseVersion(min_server_version)
-                > distutils.version.LooseVersion(server_version)
-            ):
+        if server_version != "dev":
+            min_server_version = "3.3.11"
+            if distutils.version.LooseVersion(
+                min_server_version
+            ) > distutils.version.LooseVersion(server_version):
                 raise ftrack_api.exception.ServerCompatibilityError(
-                    'Server version {0} incompatible with this version of the '
-                    'API which requires a server version >= {1}'.format(
-                        server_version,
-                        min_server_version
+                    "Server version {0} incompatible with this version of the "
+                    "API which requires a server version >= {1}".format(
                        server_version, min_server_version
                     )
                 )

     def close(self):
-        '''Close session.
+        """Close session.

         Close connections to server. Clear any pending operations and local
         cache.
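
# --- Example: a standalone sketch of the version check above -------------------
# Mirrors the module's own use of distutils.version.LooseVersion (available up
# to Python 3.11, where distutils still ships with the interpreter).
import distutils.version

min_server_version = "3.3.11"
server_version = "4.2.0"

incompatible = distutils.version.LooseVersion(
    min_server_version
) > distutils.version.LooseVersion(server_version)
print(incompatible)  # False -- 4.2.0 satisfies the 3.3.11 minimum.
# ------------------------------------------------------------------------------
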
Use this to ensure that session is cleaned up properly after use. - ''' + """ if self.closed: - self.logger.debug('Session already closed.') + self.logger.debug("Session already closed.") return self._closed = True - self.logger.debug('Closing session.') + self.logger.debug("Closing session.") if self.recorded_operations: self.logger.warning( - 'Closing session with pending operations not persisted.' + "Closing session with pending operations not persisted." ) # Clear pending operations. @@ -495,10 +487,10 @@ def close(self): except ftrack_api.exception.EventHubConnectionError: pass - self.logger.debug('Session closed.') + self.logger.debug("Session closed.") def reset(self): - '''Reset session clearing local state. + """Reset session clearing local state. Clear all pending operations and expunge all entities from session. @@ -515,10 +507,10 @@ def reset(self): Previously attached entities are not reset in memory and will retain their state, but should not be used. Doing so will cause errors. - ''' + """ if self.recorded_operations: self.logger.warning( - 'Resetting session with pending operations not persisted.' + "Resetting session with pending operations not persisted." ) # Clear pending operations. @@ -532,16 +524,13 @@ def reset(self): self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) + topic="ftrack.api.session.reset", data=dict(session=self) ), - synchronous=True + synchronous=True, ) def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. + """Temporarily set auto populate to *auto_populate*. The current setting will be restored automatically when done. @@ -550,11 +539,11 @@ def auto_populating(self, auto_populate): with session.auto_populating(False): print entity['name'] - ''' + """ return AutoPopulatingContext(self, auto_populate) def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. + """Temporarily set operation recording to *record_operations*. The current setting will be restored automatically when done. @@ -563,71 +552,68 @@ def operation_recording(self, record_operations): with session.operation_recording(False): entity['name'] = 'change_not_recorded' - ''' + """ return OperationRecordingContext(self, record_operations) @property def created(self): - '''Return list of newly created entities.''' + """Return list of newly created entities.""" entities = list(self._local_cache.values()) states = ftrack_api.inspection.states(entities) return [ - entity for (entity, state) in zip(entities, states) + entity + for (entity, state) in zip(entities, states) if state is ftrack_api.symbol.CREATED ] @property def modified(self): - '''Return list of locally modified entities.''' + """Return list of locally modified entities.""" entities = list(self._local_cache.values()) states = ftrack_api.inspection.states(entities) return [ - entity for (entity, state) in zip(entities, states) + entity + for (entity, state) in zip(entities, states) if state is ftrack_api.symbol.MODIFIED ] @property def deleted(self): - '''Return list of deleted entities.''' + """Return list of deleted entities.""" entities = list(self._local_cache.values()) states = ftrack_api.inspection.states(entities) return [ - entity for (entity, state) in zip(entities, states) + entity + for (entity, state) in zip(entities, states) if state is ftrack_api.symbol.DELETED ] def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. 
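
# --- Example: a hedged sketch of inspecting pending state above ---------------
# Assumes a reachable server; 'Task' and the attribute values are hypothetical,
# and nothing is committed here.
import ftrack_api

session = ftrack_api.Session()

task = session.create("Task", {"name": "compositing"})
print(task in session.created)  # True until committed.

existing = session.query("Task").first()
if existing is not None:
    existing["name"] = "renamed"
    print(existing in session.modified)  # True

session.rollback()  # Discard all operations recorded above.
# ------------------------------------------------------------------------------
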
+ """Perform a server side reset. *reset_type* is a server side supported reset type, passing the optional *entity* to perform the option upon. Please refer to ftrack documentation for a complete list of supported server side reset types. - ''' + """ - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } + payload = {"action": "reset_remote", "reset_type": reset_type} if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) + payload.update( + {"entity_type": entity.entity_type, "entity_key": entity.get("id")} + ) - result = self.call( - [payload] - ) + result = self.call([payload]) - return result[0]['data'] + return result[0]["data"] def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. + """Create and return an entity of *entity_type* with initial *data*. If specified, *data* should be a dictionary of key, value pairs that should be used to populate attributes on the entity. @@ -639,13 +625,13 @@ def create(self, entity_type, data=None, reconstructing=False): Constructed entity will be automatically :meth:`merged ` into the session. - ''' + """ entity = self._create(entity_type, data, reconstructing=reconstructing) entity = self.merge(entity) return entity def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' + """Create and return an entity of *entity_type* with initial *data*.""" try: EntityTypeClass = self.types[entity_type] except KeyError: @@ -654,7 +640,7 @@ def _create(self, entity_type, data, reconstructing): return EntityTypeClass(self, data=data, reconstructing=reconstructing) def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. + """Retrieve entity of *entity_type* with *data*, creating if necessary. *data* should be a dictionary of the same form passed to :meth:`create`. @@ -704,23 +690,28 @@ def ensure(self, entity_type, data, identifying_keys=None): 'User', {'username': 'martin', 'email': 'martin@example.com'} ) - ''' + """ if not identifying_keys: identifying_keys = list(data.keys()) - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) + self.logger.debug( + L( + "Ensuring entity {0!r} with data {1!r} using identifying keys " "{2!r}", + entity_type, + data, + identifying_keys, + ) + ) if not identifying_keys: raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) + "Could not determine any identifying data to check against " + "when ensuring {0!r} with data {1!r}. Identifying keys: {2!r}".format( + entity_type, data, identifying_keys + ) ) - expression = '{0} where'.format(entity_type) + expression = "{0} where".format(entity_type) criteria = [] for identifying_key in identifying_keys: value = data[identifying_key] @@ -728,35 +719,29 @@ def ensure(self, entity_type, data, identifying_keys=None): if isinstance(value, string_types): value = '"{0}"'.format(value) - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): + elif isinstance(value, (arrow.Arrow, datetime.datetime, datetime.date)): # Server does not store microsecond or timezone currently so # need to strip from query. 
                # TODO: When datetime handling improved, update this logic.
-                value = (
-                    arrow.get(value).naive.replace(microsecond=0).isoformat()
-                )
+                value = arrow.get(value).naive.replace(microsecond=0).isoformat()
                 value = '"{0}"'.format(value)

-            criteria.append('{0} is {1}'.format(identifying_key, value))
+            criteria.append("{0} is {1}".format(identifying_key, value))

-        expression = '{0} {1}'.format(
-            expression, ' and '.join(criteria)
-        )
+        expression = "{0} {1}".format(expression, " and ".join(criteria))

         try:
             entity = self.query(expression).one()

         except ftrack_api.exception.NoResultFoundError:
-            self.logger.debug('Creating entity as did not already exist.')
+            self.logger.debug("Creating entity as did not already exist.")

             # Create entity.
             entity = self.create(entity_type, data)
             self.commit()

         else:
-            self.logger.debug('Retrieved matching existing entity.')
+            self.logger.debug("Retrieved matching existing entity.")

             # Update entity if required.
             updated = False
@@ -766,31 +751,30 @@ def ensure(self, entity_type, data, identifying_keys=None):
                 updated = True

             if updated:
-                self.logger.debug('Updating existing entity to match new data.')
+                self.logger.debug("Updating existing entity to match new data.")
                 self.commit()

         return entity

     def delete(self, entity):
-        '''Mark *entity* for deletion.'''
+        """Mark *entity* for deletion."""
         if self.record_operations:
             self.recorded_operations.push(
                 ftrack_api.operation.DeleteEntityOperation(
-                    entity.entity_type,
-                    ftrack_api.inspection.primary_key(entity)
+                    entity.entity_type, ftrack_api.inspection.primary_key(entity)
                 )
             )

     def get(self, entity_type, entity_key):
-        '''Return entity of *entity_type* with unique *entity_key*.
+        """Return entity of *entity_type* with unique *entity_key*.

         First check for an existing entry in the configured cache, otherwise
         issue a query to the server.

         If no matching entity found, return None.

-        '''
-        self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key))
+        """
+        self.logger.debug(L("Get {0} with key {1}", entity_type, entity_key))

         primary_key_definition = self.types[entity_type].primary_key_attributes
         if isinstance(entity_key, string_types):
@@ -798,11 +782,12 @@ def get(self, entity_type, entity_key):

         if len(entity_key) != len(primary_key_definition):
             raise ValueError(
-                'Incompatible entity_key {0!r} supplied. Entity type {1} '
-                'expects a primary key composed of {2} values ({3}).'
-                .format(
-                    entity_key, entity_type, len(primary_key_definition),
-                    ', '.join(primary_key_definition)
+                "Incompatible entity_key {0!r} supplied. Entity type {1} "
+                "expects a primary key composed of {2} values ({3}).".format(
+                    entity_key,
+                    entity_type,
+                    len(primary_key_definition),
+                    ", ".join(primary_key_definition),
                 )
             )

@@ -810,20 +795,14 @@ def get(self, entity_type, entity_key):

         try:
             entity = self._get(entity_type, entity_key)

         except KeyError:
             # Query for matching entity.
-            self.logger.debug(
-                'Entity not present in cache. Issuing new query.'
-            )
+            self.logger.debug("Entity not present in cache. Issuing new query.")

             condition = []
             for key, value in zip(primary_key_definition, entity_key):
                 condition.append('{0} is "{1}"'.format(key, value))

-            expression = '{0} where ({1})'.format(
-                entity_type, ' and '.join(condition)
-            )
+            expression = "{0} where ({1})".format(entity_type, " and ".join(condition))

             results = self.query(expression).all()
             if results:
@@ -832,29 +811,26 @@ def get(self, entity_type, entity_key):

         return entity

     def _get(self, entity_type, entity_key):
-        '''Return cached entity of *entity_type* with unique *entity_key*.
+ """Return cached entity of *entity_type* with unique *entity_key*. Raise :exc:`KeyError` if no such entity in the cache. - ''' + """ # Check cache for existing entity emulating # ftrack_api.inspection.identity result object to pass to key maker. cache_key = self.cache_key_maker.key( (str(entity_type), list(map(str, entity_key))) ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) + self.logger.debug(L("Checking cache for entity with key {0}", cache_key)) entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) + self.logger.debug( + L("Retrieved existing entity from cache: {0} at {1}", entity, id(entity)) + ) return entity def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. + """Query against remote data according to *expression*. *expression* is not executed directly. Instead return an :class:`ftrack_api.query.QueryResult` instance that will execute remote @@ -865,21 +841,20 @@ def query(self, expression, page_size=500): .. seealso:: :ref:`querying` - ''' - self.logger.debug(L('Query {0!r}', expression)) + """ + self.logger.debug(L("Query {0!r}", expression)) # Add in sensible projections if none specified. Note that this is # done here rather than on the server to allow local modification of the # schema setting to include commonly used custom attributes for example. # TODO: Use a proper parser perhaps? - if not expression.startswith('select'): - entity_type = expression.split(' ', 1)[0] + if not expression.startswith("select"): + entity_type = expression.split(" ", 1)[0] EntityTypeClass = self.types[entity_type] projections = EntityTypeClass.default_projections - expression = 'select {0} from {1}'.format( - ', '.join(projections), - expression + expression = "select {0} from {1}".format( + ", ".join(projections), expression ) query_result = ftrack_api.query.QueryResult( @@ -888,18 +863,15 @@ def query(self, expression, page_size=500): return query_result def _query(self, expression): - '''Execute *query* and return (records, metadata). + """Execute *query* and return (records, metadata). Records will be a list of entities retrieved via the query and metadata a dictionary of accompanying information about the result set. - ''' + """ # TODO: Actually support batching several queries together. # TODO: Should batches have unique ids to match them up later. - batch = [{ - 'action': 'query', - 'expression': expression - }] + batch = [{"action": "query", "expression": expression}] # TODO: When should this execute? How to handle background=True? results = self.call(batch) @@ -907,19 +879,19 @@ def _query(self, expression): # Merge entities into local cache and return merged entities. data = [] merged = dict() - for entity in results[0]['data']: + for entity in results[0]["data"]: data.append(self._merge_recursive(entity, merged)) - return data, results[0]['metadata'] + return data, results[0]["metadata"] def merge(self, value, merged=None): - '''Merge *value* into session and return merged value. + """Merge *value* into session and return merged value. *merged* should be a mapping to record merges during run and should be used to avoid infinite recursion. If not set will default to a dictionary. 
-        '''
+        """
         if merged is None:
             merged = {}

@@ -927,43 +899,40 @@ def merge(self, value, merged=None):
         return self._merge(value, merged)

     def _merge(self, value, merged):
-        '''Return merged *value*.'''
+        """Return merged *value*."""
         log_debug = self.logger.isEnabledFor(logging.DEBUG)

         with self.merge_lock:
             if isinstance(value, ftrack_api.entity.base.Entity):
                 log_debug and self.logger.debug(
-                    'Merging entity into session: {0} at {1}'
-                    .format(value, id(value))
+                    "Merging entity into session: {0} at {1}".format(value, id(value))
                 )

                 return self._merge_entity(value, merged=merged)

             elif isinstance(value, ftrack_api.collection.Collection):
                 log_debug and self.logger.debug(
-                    'Merging collection into session: {0!r} at {1}'
-                    .format(value, id(value))
+                    "Merging collection into session: {0!r} at {1}".format(
+                        value, id(value)
+                    )
                 )

                 merged_collection = []
                 for entry in value:
-                    merged_collection.append(
-                        self._merge(entry, merged=merged)
-                    )
+                    merged_collection.append(self._merge(entry, merged=merged))

                 return merged_collection

             elif isinstance(value, ftrack_api.collection.MappedCollectionProxy):
                 log_debug and self.logger.debug(
-                    'Merging mapped collection into session: {0!r} at {1}'
-                    .format(value, id(value))
+                    "Merging mapped collection into session: {0!r} at {1}".format(
+                        value, id(value)
+                    )
                 )

                 merged_collection = []
                 for entry in value.collection:
-                    merged_collection.append(
-                        self._merge(entry, merged=merged)
-                    )
+                    merged_collection.append(self._merge(entry, merged=merged))

                 return merged_collection

@@ -971,7 +940,7 @@ def _merge(self, value, merged):
             return value

     def _merge_recursive(self, entity, merged=None):
-        '''Merge *entity* and all its attributes recursivly.'''
+        """Merge *entity* and all its attributes recursively."""
         log_debug = self.logger.isEnabledFor(logging.DEBUG)

         if merged is None:
@@ -988,19 +957,17 @@ def _merge_recursive(self, entity, merged=None):
             (
                 ftrack_api.entity.base.Entity,
                 ftrack_api.collection.Collection,
-                ftrack_api.collection.MappedCollectionProxy
-            )
+                ftrack_api.collection.MappedCollectionProxy,
+            ),
         ):
             log_debug and self.logger.debug(
-                'Merging remote value for attribute {0}.'.format(attribute)
+                "Merging remote value for attribute {0}.".format(attribute)
            )

             if isinstance(remote_value, ftrack_api.entity.base.Entity):
                 self._merge_recursive(remote_value, merged=merged)

-            elif isinstance(
-                remote_value, ftrack_api.collection.Collection
-            ):
+            elif isinstance(remote_value, ftrack_api.collection.Collection):
                 for entry in remote_value:
                     self._merge_recursive(entry, merged=merged)

@@ -1013,7 +980,7 @@ def _merge_recursive(self, entity, merged=None):
         return attached

     def _merge_entity(self, entity, merged=None):
-        '''Merge *entity* into session returning merged entity.
+        """Merge *entity* into session returning merged entity.

         Merge is recursive so any references to other entities will also be
         merged.

         *entity* will never be modified in place. Ensure that the returned
         merged entity instance is used.
- ''' + """ log_debug = self.logger.isEnabledFor(logging.DEBUG) if merged is None: @@ -1036,27 +1003,28 @@ def _merge_entity(self, entity, merged=None): attached_entity = merged.get(entity_key) if attached_entity is not None: log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) + "Entity already processed for key {0} as {1} at {2}".format( + entity_key, attached_entity, id(attached_entity) + ) ) return attached_entity else: log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) + "Entity not already processed for key {0}.".format(entity_key) ) # Check for existing instance of entity in cache. log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) + "Checking for entity in cache with key {0}".format(entity_key) ) try: attached_entity = self.cache.get(entity_key) log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) + "Retrieved existing entity from cache: {0} at {1}".format( + attached_entity, id(attached_entity) + ) ) except KeyError: @@ -1066,8 +1034,8 @@ def _merge_entity(self, entity, merged=None): ) log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) + "Entity not present in cache. Constructed new instance: " + "{0} at {1}".format(attached_entity, id(attached_entity)) ) # Mark entity as seen to avoid infinite loops. @@ -1076,18 +1044,18 @@ def _merge_entity(self, entity, merged=None): changes = attached_entity.merge(entity, merged=merged) if changes: self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') + self.logger.debug("Cache updated with merged entity.") else: self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' + "Cache not updated with merged entity as no differences " + "detected." ) return attached_entity def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. + """Populate *entities* with attributes specified by *projections*. Any locally set values included in the *projections* will not be overwritten with the retrieved remote value. If this 'synchronise' @@ -1106,14 +1074,12 @@ def populate(self, entities, projections): Entities that have been created and not yet persisted will be skipped as they have no remote values to fetch. - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) + """ + self.logger.debug( + L("Populate {0!r} projections for {1}.", projections, entities) + ) - if not isinstance( - entities, (list, tuple, ftrack_api.query.QueryResult) - ): + if not isinstance(entities, (list, tuple, ftrack_api.query.QueryResult)): entities = [entities] # TODO: How to handle a mixed collection of different entity types @@ -1129,10 +1095,13 @@ def populate(self, entities, projections): # values. Don't raise an error here as it is reasonable to # iterate over an entities properties and see that some of them # are NOT_SET. 
- self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) + self.logger.debug( + L( + "Skipping newly created entity {0!r} for population as no " + "data will exist in the remote for this entity yet.", + entity, + ) + ) continue entities_to_process.append(entity) @@ -1140,7 +1109,7 @@ def populate(self, entities, projections): if entities_to_process: reference_entity = entities_to_process[0] entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) + query = "select {0} from {1}".format(projections, entity_type) primary_key_definition = reference_entity.primary_key_attributes entity_keys = [ @@ -1156,22 +1125,21 @@ def populate(self, entities, projections): for key, value in zip(primary_key_definition, entity_key): condition.append('{0} is "{1}"'.format(key, value)) - conditions.append('({0})'.format('and '.join(condition))) + conditions.append("({0})".format("and ".join(condition))) - query = '{0} where {1}'.format(query, ' or '.join(conditions)) + query = "{0} where {1}".format(query, " or ".join(conditions)) else: primary_key = primary_key_definition[0] if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) + query = "{0} where {1} in ({2})".format( + query, + primary_key, + ",".join([str(entity_key[0]) for entity_key in entity_keys]), ) else: - query = '{0} where {1} is {2}'.format( + query = "{0} where {1} is {2}".format( query, primary_key, str(entity_keys[0][0]) ) @@ -1187,61 +1155,60 @@ def populate(self, entities, projections): # TODO: Make atomic. def commit(self): - '''Commit all local changes to the server.''' + """Commit all local changes to the server.""" batch = [] with self.auto_populating(False): for operation in self.recorded_operations: - # Convert operation to payload. - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): + if isinstance(operation, ftrack_api.operation.CreateEntityOperation): # At present, data payload requires duplicating entity # type in data and also ensuring primary key added. entity_data = { - '__entity_type__': operation.entity_type, + "__entity_type__": operation.entity_type, } entity_data.update(operation.entity_key) entity_data.update(operation.entity_data) - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()), - 'entity_data': entity_data - }) + payload = OperationPayload( + { + "action": "create", + "entity_type": operation.entity_type, + "entity_key": list(operation.entity_key.values()), + "entity_data": entity_data, + } + ) - elif isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ): + elif isinstance(operation, ftrack_api.operation.UpdateEntityOperation): entity_data = { # At present, data payload requires duplicating entity # type. 
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value + "__entity_type__": operation.entity_type, + operation.attribute_name: operation.new_value, } - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()), - 'entity_data': entity_data - }) + payload = OperationPayload( + { + "action": "update", + "entity_type": operation.entity_type, + "entity_key": list(operation.entity_key.values()), + "entity_data": entity_data, + } + ) - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()) - }) + elif isinstance(operation, ftrack_api.operation.DeleteEntityOperation): + payload = OperationPayload( + { + "action": "delete", + "entity_type": operation.entity_type, + "entity_key": list(operation.entity_key.values()), + } + ) else: raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) + "Cannot commit. Unrecognised operation type {0} " + "detected.".format(type(operation)) ) batch.append(payload) @@ -1256,22 +1223,18 @@ def commit(self): deleted = set() for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) + if payload["action"] == "create": + created.add((payload["entity_type"], str(payload["entity_key"]))) - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) + elif payload["action"] == "delete": + deleted.add((payload["entity_type"], str(payload["entity_key"]))) created_then_deleted = deleted.intersection(created) if created_then_deleted: optimised_batch = [] for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) + entity_type = payload.get("entity_type") + entity_key = str(payload.get("entity_key")) if (entity_type, entity_key) in created_then_deleted: continue @@ -1284,22 +1247,20 @@ def commit(self): # attribute is applied server side. updates_map = set() for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in list(payload['entity_data'].items()): - if key == '__entity_type__': + if payload["action"] in ("update",): + for key, value in list(payload["entity_data"].items()): + if key == "__entity_type__": continue - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) + identity = (payload["entity_type"], str(payload["entity_key"]), key) if identity in updates_map: - del payload['entity_data'][key] + del payload["entity_data"][key] else: updates_map.add(identity) # Remove NOT_SET values from entity_data. for payload in batch: - entity_data = payload.get('entity_data', {}) + entity_data = payload.get("entity_data", {}) for key, value in list(entity_data.items()): if value is ftrack_api.symbol.NOT_SET: del entity_data[key] @@ -1307,10 +1268,10 @@ def commit(self): # Remove payloads with redundant entity_data. 
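
# --- Example: a standalone sketch of the create-then-delete pruning above -----
# Payloads whose entity is both created and deleted in one batch are dropped
# entirely, including any updates in between. Payload values are hypothetical.
batch = [
    {"action": "create", "entity_type": "Task", "entity_key": ["a"]},
    {"action": "update", "entity_type": "Task", "entity_key": ["a"]},
    {"action": "delete", "entity_type": "Task", "entity_key": ["a"]},
    {"action": "create", "entity_type": "Task", "entity_key": ["b"]},
]

created = {
    (payload["entity_type"], str(payload["entity_key"]))
    for payload in batch
    if payload["action"] == "create"
}
deleted = {
    (payload["entity_type"], str(payload["entity_key"]))
    for payload in batch
    if payload["action"] == "delete"
}
created_then_deleted = deleted.intersection(created)

optimised_batch = [
    payload
    for payload in batch
    if (payload["entity_type"], str(payload["entity_key"]))
    not in created_then_deleted
]
print(len(optimised_batch))  # 1 -- only the create for 'b' survives.
# ------------------------------------------------------------------------------
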
optimised_batch = [] for payload in batch: - entity_data = payload.get('entity_data') + entity_data = payload.get("entity_data") if entity_data is not None: keys = list(entity_data.keys()) - if not keys or keys == ['__entity_type__']: + if not keys or keys == ["__entity_type__"]: continue optimised_batch.append(payload) @@ -1325,12 +1286,12 @@ def commit(self): for payload in batch: if ( previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] + and payload["action"] == "update" + and previous_payload["action"] in ("create", "update") + and previous_payload["entity_type"] == payload["entity_type"] + and previous_payload["entity_key"] == payload["entity_key"] ): - previous_payload['entity_data'].update(payload['entity_data']) + previous_payload["entity_data"].update(payload["entity_data"]) continue else: @@ -1358,12 +1319,11 @@ def commit(self): # Process results merging into cache relevant data. for entry in result: - - if entry['action'] in ('create', 'update'): + if entry["action"] in ("create", "update"): # Merge returned entities into local cache. - self.merge(entry['data']) + self.merge(entry["data"]) - elif entry['action'] == 'delete': + elif entry["action"] == "delete": # TODO: Detach entity - need identity returned? # TODO: Expunge entity from cache. pass @@ -1375,7 +1335,7 @@ def commit(self): entity.clear() def rollback(self): - '''Clear all recorded operations and local state. + """Clear all recorded operations and local state. Typically this would be used following a failed :meth:`commit` in order to revert the session to a known good state. @@ -1385,10 +1345,9 @@ def rollback(self): objects are not deleted from memory. They should no longer be used and doing so could cause errors. - ''' + """ with self.auto_populating(False): with self.operation_recording(False): - # Detach all newly created entities and remove from cache. This # is done because simply clearing the local values of newly # created entities would result in entities with no identity as @@ -1399,10 +1358,12 @@ def rollback(self): if isinstance( operation, ftrack_api.operation.CreateEntityOperation ): - entity_key = str(( - str(operation.entity_type), - list(operation.entity_key.values()) - )) + entity_key = str( + ( + str(operation.entity_type), + list(operation.entity_key.values()), + ) + ) try: self.cache.remove(entity_key) except KeyError: @@ -1415,12 +1376,12 @@ def rollback(self): self.recorded_operations.clear() def _fetch_server_information(self): - '''Return server information.''' - result = self.call([{'action': 'query_server_information'}]) + """Return server information.""" + result = self.call([{"action": "query_server_information"}]) return result[0] def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. + """Find and load plugins in search paths. Each discovered module should implement a register function that accepts this session as first argument. Typically the function should @@ -1435,54 +1396,48 @@ def register(session): *plugin_arguments* should be an optional mapping of keyword arguments and values to pass to plugin register functions upon discovery. 
- ''' + """ plugin_arguments = plugin_arguments or {} - ftrack_api.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) + ftrack_api.plugin.discover(self._plugin_paths, [self], plugin_arguments) def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. + """Return schemas and schema hash from *schema_cache_path*. *schema_cache_path* should be the path to the file containing the schemas in JSON format. - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) + """ + self.logger.debug(L("Reading schemas from cache {0!r}", schema_cache_path)) if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) + self.logger.info(L("Cache file not found at {0!r}.", schema_cache_path)) return [], None - with open(schema_cache_path, 'r') as schema_file: + with open(schema_cache_path, "r") as schema_file: schemas = json.load(schema_file) hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True).encode('utf-8') + json.dumps(schemas, sort_keys=True).encode("utf-8") ).hexdigest() return schemas, hash_ def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. + """Write *schemas* to *schema_cache_path*. *schema_cache_path* should be a path to a file that the schemas can be written to in JSON format. - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) + """ + self.logger.debug( + L("Updating schema cache {0!r} with new schemas.", schema_cache_path) + ) - with open(schema_cache_path, 'w') as local_cache_file: + with open(schema_cache_path, "w") as local_cache_file: json.dump(schemas, local_cache_file, indent=4) def _load_schemas(self, schema_cache_path): - '''Load schemas. + """Load schemas. First try to load schemas from cache at *schema_cache_path*. If the cache is not available or the cache appears outdated then load schemas @@ -1491,7 +1446,7 @@ def _load_schemas(self, schema_cache_path): If *schema_cache_path* is set to `False`, always load schemas from server bypassing cache. - ''' + """ local_schema_hash = None schemas = [] @@ -1503,70 +1458,69 @@ def _load_schemas(self, schema_cache_path): except (IOError, TypeError, AttributeError, ValueError): # Catch any known exceptions when trying to read the local # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) + self.logger.exception( + L("Schema cache could not be loaded from {0!r}", schema_cache_path) + ) # Use `dictionary.get` to retrieve hash to support older version of # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) + server_hash = self._server_information.get("schema_hash", False) if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' - 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self.call([{'action': 'query_schemas'}])[0] + self.logger.debug( + L( + "Loading schemas from server due to hash not matching." 
+ "Local: {0!r} != Server: {1!r}", + local_schema_hash, + server_hash, + ) + ) + schemas = self.call([{"action": "query_schemas"}])[0] if schema_cache_path: try: self._write_schemas_to_cache(schemas, schema_cache_path) except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) + self.logger.exception( + L("Failed to update schema cache {0!r}.", schema_cache_path) + ) else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) + self.logger.debug(L("Using cached schemas from {0!r}", schema_cache_path)) return schemas def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' + """Build default entity type classes.""" fallback_factory = ftrack_api.entity.factory.StandardFactory() classes = {} for schema in schemas: results = self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) + topic="ftrack.api.session.construct-entity-type", + data=dict(schema=schema, schemas=schemas), ), - synchronous=True + synchronous=True, ) results = [result for result in results if result is not None] if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) + self.logger.debug( + L( + "Using default StandardFactory to construct entity type " + 'class for "{0}"', + schema["id"], + ) + ) entity_type_class = fallback_factory.create(schema) elif len(results) > 1: raise ValueError( 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' - .format(schema['id'], len(results)) + "received {1} entity types instead.".format( + schema["id"], len(results) + ) ) else: @@ -1577,40 +1531,38 @@ def _build_entity_type_classes(self, schemas): return classes def _configure_locations(self): - '''Configure locations.''' + """Configure locations.""" # First configure builtin locations, by injecting them into local cache. # Origin. location = self.create( - 'Location', - data=dict( - name='ftrack.origin', - id=ftrack_api.symbol.ORIGIN_LOCATION_ID - ), - reconstructing=True + "Location", + data=dict(name="ftrack.origin", id=ftrack_api.symbol.ORIGIN_LOCATION_ID), + reconstructing=True, ) ftrack_api.mixin( - location, ftrack_api.entity.location.OriginLocationMixin, - name='OriginLocation' + location, + ftrack_api.entity.location.OriginLocationMixin, + name="OriginLocation", ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="") location.structure = ftrack_api.structure.origin.OriginStructure() location.priority = 100 # Unmanaged. 
location = self.create( - 'Location', + "Location", data=dict( - name='ftrack.unmanaged', - id=ftrack_api.symbol.UNMANAGED_LOCATION_ID + name="ftrack.unmanaged", id=ftrack_api.symbol.UNMANAGED_LOCATION_ID ), - reconstructing=True + reconstructing=True, ) ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' + location, + ftrack_api.entity.location.UnmanagedLocationMixin, + name="UnmanagedLocation", ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="") location.structure = ftrack_api.structure.origin.OriginStructure() # location.resource_identifier_transformer = ( # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) @@ -1619,78 +1571,61 @@ def _configure_locations(self): # Review. location = self.create( - 'Location', - data=dict( - name='ftrack.review', - id=ftrack_api.symbol.REVIEW_LOCATION_ID - ), - reconstructing=True + "Location", + data=dict(name="ftrack.review", id=ftrack_api.symbol.REVIEW_LOCATION_ID), + reconstructing=True, ) ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' + location, + ftrack_api.entity.location.UnmanagedLocationMixin, + name="UnmanagedLocation", ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="") location.structure = ftrack_api.structure.origin.OriginStructure() location.priority = 110 # Server. location = self.create( - 'Location', - data=dict( - name='ftrack.server', - id=ftrack_api.symbol.SERVER_LOCATION_ID - ), - reconstructing=True + "Location", + data=dict(name="ftrack.server", id=ftrack_api.symbol.SERVER_LOCATION_ID), + reconstructing=True, ) ftrack_api.mixin( - location, ftrack_api.entity.location.ServerLocationMixin, - name='ServerLocation' - ) - location.accessor = ftrack_api.accessor.server._ServerAccessor( - session=self + location, + ftrack_api.entity.location.ServerLocationMixin, + name="ServerLocation", ) + location.accessor = ftrack_api.accessor.server._ServerAccessor(session=self) location.structure = ftrack_api.structure.entity_id.EntityIdStructure() location.priority = 150 # Master location based on server scenario. - storage_scenario = self.server_information.get('storage_scenario') + storage_scenario = self.server_information.get("storage_scenario") - if ( - storage_scenario and - storage_scenario.get('scenario') - ): + if storage_scenario and storage_scenario.get("scenario"): self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.activate', - data=dict( - storage_scenario=storage_scenario - ) + topic="ftrack.storage-scenario.activate", + data=dict(storage_scenario=storage_scenario), ), - synchronous=True + synchronous=True, ) # Next, allow further configuration of locations via events. 
self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) + topic="ftrack.api.session.configure-location", data=dict(session=self) ), - synchronous=True + synchronous=True, ) def call(self, data): - '''Make request to server with *data* batch describing the actions.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') + """Make request to server with *data* batch describing the actions.""" + url = self._server_url + "/api" + headers = {"content-type": "application/json", "accept": "application/json"} + data = self.encode(data, entity_attribute_strategy="modified_only") - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) + self.logger.debug(L("Calling server {0} with {1!r}", url, data)) try: result = {} @@ -1700,18 +1635,18 @@ def call(self, data): data=data, timeout=self.request_timeout, ) - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - self.logger.debug(L('Response: {0!r}', response.text)) - + self.logger.debug(L("Call took: {0}", response.elapsed.total_seconds())) + self.logger.debug(L("Response: {0!r}", response.text)) + result = self.decode(response.text) response.raise_for_status() # handle response exceptions and / or other http exceptions # (strict api used => status code returned => raise_for_status() => HTTPError) except requests.exceptions.HTTPError as exc: - if 'exception' in result: - error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] + if "exception" in result: + error_message = "Server reported error: {0}({1})".format( + result["exception"], result["content"] ) self._raise_server_error(error_message) else: @@ -1720,17 +1655,18 @@ def call(self, data): # JSON response decoding exception except (TypeError, ValueError): error_message = ( - 'Server reported error in unexpected format. Raw error was: {0}' - .format(response.text) + "Server reported error in unexpected format. Raw error was: {0}".format( + response.text + ) ) self._raise_server_error(error_message) - + # handle possible response exceptions # (strict api not used => 200 returned) else: - if 'exception' in result: - error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] + if "exception" in result: + error_message = "Server reported error: {0}({1})".format( + result["exception"], result["content"] ) self._raise_server_error(error_message) return result @@ -1739,8 +1675,8 @@ def _raise_server_error(self, error_message): self.logger.exception(error_message) raise ftrack_api.exception.ServerError(error_message) - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. + def encode(self, data, entity_attribute_strategy="set_only"): + """Return *data* encoded as JSON formatted string. *entity_attribute_strategy* specifies how entity attributes should be handled. The following strategies are available: @@ -1752,16 +1688,18 @@ def encode(self, data, entity_attribute_strategy='set_only'): locally. * *persisted_only* - Encode only remote (persisted) attribute values. 
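
# --- Example: a hedged sketch of the request made by call() above -------------
# URL and credentials are hypothetical; json.dumps stands in for
# Session.encode, which suffices for a plain action batch with no entities.
import json

import requests

from ftrack_api.session import SessionAuthentication

batch = [{"action": "query", "expression": "select id from Task limit 1"}]
response = requests.post(
    "https://example.ftrackapp.com/api",
    headers={"content-type": "application/json", "accept": "application/json"},
    data=json.dumps(batch),
    auth=SessionAuthentication(
        "00000000-aaaa-bbbb-cccc-000000000000", "martin"
    ),
    timeout=60,
)
print(response.status_code)
# ------------------------------------------------------------------------------
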
- ''' + """ entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' + "all", + "set_only", + "modified_only", + "persisted_only", ) if entity_attribute_strategy not in entity_attribute_strategies: raise ValueError( 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) + "{1}".format( + entity_attribute_strategy, ", ".join(entity_attribute_strategies) ) ) @@ -1769,23 +1707,19 @@ def encode(self, data, entity_attribute_strategy='set_only'): data, sort_keys=True, default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) + self._encode, entity_attribute_strategy=entity_attribute_strategy + ), ) - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. + def _encode(self, item, entity_attribute_strategy="set_only"): + """Return JSON encodable version of *item*. *entity_attribute_strategy* specifies how entity attributes should be handled. See :meth:`Session.encode` for available strategies. - ''' + """ if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): - return { - '__type__': 'datetime', - 'value': item.isoformat() - } + return {"__type__": "datetime", "value": item.isoformat()} if isinstance(item, OperationPayload): data = dict(list(item.items())) @@ -1800,24 +1734,23 @@ def _encode(self, item, entity_attribute_strategy='set_only'): data = self.entity_reference(item) with self.auto_populating(True): - for attribute in item.attributes: value = ftrack_api.symbol.NOT_SET - if entity_attribute_strategy == 'all': + if entity_attribute_strategy == "all": value = attribute.get_value(item) - elif entity_attribute_strategy == 'set_only': + elif entity_attribute_strategy == "set_only": if attribute.is_set(item): value = attribute.get_local_value(item) if value is ftrack_api.symbol.NOT_SET: value = attribute.get_remote_value(item) - elif entity_attribute_strategy == 'modified_only': + elif entity_attribute_strategy == "modified_only": if attribute.is_modified(item): value = attribute.get_local_value(item) - elif entity_attribute_strategy == 'persisted_only': + elif entity_attribute_strategy == "persisted_only": if not attribute.computed: value = attribute.get_remote_value(item) @@ -1832,9 +1765,7 @@ def _encode(self, item, entity_attribute_strategy='set_only'): return data - if isinstance( - item, ftrack_api.collection.MappedCollectionProxy - ): + if isinstance(item, ftrack_api.collection.MappedCollectionProxy): # Use proxied collection for serialisation. item = item.collection @@ -1845,65 +1776,59 @@ def _encode(self, item, entity_attribute_strategy='set_only'): return data - raise TypeError('{0!r} is not JSON serializable'.format(item)) + raise TypeError("{0!r} is not JSON serializable".format(item)) def entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. + """Return entity reference that uniquely identifies *entity*. Return a mapping containing the __entity_type__ of the entity along with the key, value pairs that make up it's primary key. 
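
# --- Example: a standalone sketch of the datetime encoding branch above -------
# The same {"__type__": "datetime"} marker is what _decode later reverses.
import datetime
import json


def encode_default(item):
    if isinstance(item, (datetime.datetime, datetime.date)):
        return {"__type__": "datetime", "value": item.isoformat()}
    raise TypeError("{0!r} is not JSON serializable".format(item))


print(json.dumps({"created": datetime.date(2024, 1, 1)}, default=encode_default))
# {"created": {"__type__": "datetime", "value": "2024-01-01"}}
# ------------------------------------------------------------------------------
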
- ''' - reference = { - '__entity_type__': entity.entity_type - } + """ + reference = {"__entity_type__": entity.entity_type} with self.auto_populating(False): reference.update(ftrack_api.inspection.primary_key(entity)) return reference def decode(self, string): - '''Return decoded JSON *string* as Python object.''' + """Return decoded JSON *string* as Python object.""" with self.operation_recording(False): return json.loads(string, object_hook=self._decode) def _decode(self, item): - '''Return *item* transformed into appropriate representation.''' + """Return *item* transformed into appropriate representation.""" if isinstance(item, collections_abc.Mapping): - if '__type__' in item: - if item['__type__'] == 'datetime': - item = arrow.get(item['value']) + if "__type__" in item: + if item["__type__"] == "datetime": + item = arrow.get(item["value"]) - elif '__entity_type__' in item: - item = self._create( - item['__entity_type__'], item, reconstructing=True - ) + elif "__entity_type__" in item: + item = self._create(item["__entity_type__"], item, reconstructing=True) return item def _get_locations(self, filter_inaccessible=True): - '''Helper to returns locations ordered by priority. + """Helper to return locations ordered by priority. If *filter_inaccessible* is True then only accessible locations will be included in result. - ''' + """ # Optimise this call. - locations = self.query('Location') + locations = self.query("Location") # Filter. if filter_inaccessible: locations = [location for location in locations if location.accessor] # Sort by priority. - locations = sorted( - locations, key=lambda location: location.priority - ) + locations = sorted(locations, key=lambda location: location.priority) return locations def pick_location(self, component=None): - '''Return suitable location to use. + """Return suitable location to use. If no *component* is specified then return highest priority accessible location. Otherwise, return highest priority accessible location that @@ -1911,7 +1836,7 @@ def pick_location(self, component=None): Return None if no suitable location could be picked. - ''' + """ if component: return self.pick_locations([component])[0] @@ -1923,14 +1848,14 @@ def pick_location(self, component=None): return None def pick_locations(self, components): - '''Return suitable locations for *components*. + """Return suitable locations for *components*. Return list of locations corresponding to *components* where each picked location is the highest priority accessible location for that component. If a component has no location available then its corresponding entry will be None. - ''' + """ candidate_locations = self._get_locations() availabilities = self.get_component_availabilities( components, locations=candidate_locations @@ -1941,7 +1866,7 @@ def pick_locations(self, components): location = None for candidate_location in candidate_locations: - if availability.get(candidate_location['id']) > 0.0: + if availability.get(candidate_location["id"]) > 0.0: location = candidate_location break @@ -1949,10 +1874,8 @@ def pick_locations(self, components): return locations - def create_component( - self, path, data=None, location='auto' - ): - '''Create a new component from *path* with additional *data* + def create_component(self, path, data=None, location="auto"): + """Create a new component from *path* with additional *data*. .. note::
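Continuing the sketch above, the location helpers might be used as follows; the component query is an assumption:

    # Highest priority accessible location, or None if nothing qualifies.
    location = session.pick_location()

    # Highest priority accessible location that actually holds the component.
    component = session.query("FileComponent").first()
    location_for_component = session.pick_location(component)

    # Batched form; entries are None where no suitable location exists.
    locations = session.pick_locations([component])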
- ''' + """ if data is None: data = {} - if location == 'auto': + if location == "auto": # Check if the component name matches one of the ftrackreview # specific names. Add the component to the ftrack.review location if # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' + if data.get("name") in ( + "ftrackreview-mp4", + "ftrackreview-webm", + "ftrackreview-image", ): - location = self.get( - 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID - ) + location = self.get("Location", ftrack_api.symbol.REVIEW_LOCATION_ID) else: location = self.pick_location() def retrieve_file_type(_path): - '''try to retrive the file type from any registered plugins. If - none are available fall back to os.path.splitext''' + """try to retrive the file type from any registered plugins. If + none are available fall back to os.path.splitext""" response = self.event_hub.publish( ftrack_api.event.base.Event( - topic='ftrack.api.session.get-file-type-from-string', - data=dict( - file_path=_path - ) + topic="ftrack.api.session.get-file-type-from-string", + data=dict(file_path=_path), ), - synchronous=True + synchronous=True, ) - _file_type = next( - (result for result in response if result), None - ) + _file_type = next((result for result in response if result), None) if not _file_type: return os.path.splitext(_path)[-1] @@ -2030,29 +1949,23 @@ def retrieve_file_type(_path): except ValueError: # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) + if "size" not in data: + data["size"] = self._get_filesystem_size(path) - file_type = retrieve_file_type( - path - ) + file_type = retrieve_file_type(path) - data.setdefault('file_type', file_type) + data.setdefault("file_type", file_type) - return self._create_component( - 'FileComponent', path, data, location - ) + return self._create_component("FileComponent", path, data, location) else: # Calculate size of container and members. member_sizes = {} - container_size = data.get('size') + container_size = data.get("size") if container_size is not None: if len(collection.indexes) > 0: - member_size = int( - round(container_size / len(collection.indexes)) - ) + member_size = int(round(container_size / len(collection.indexes))) for item in collection: member_sizes[item] = member_size @@ -2064,56 +1977,50 @@ def retrieve_file_type(_path): # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - file_type = retrieve_file_type( - container_path - ) + container_path = collection.format("{head}{padding}{tail}") + file_type = retrieve_file_type(container_path) - data.setdefault('padding', collection.padding) - data.setdefault('file_type', file_type) - data.setdefault('size', container_size) + data.setdefault("padding", collection.padding) + data.setdefault("file_type", file_type) + data.setdefault("size", container_size) container = self._create_component( - 'SequenceComponent', container_path, data, location=None + "SequenceComponent", container_path, data, location=None ) # Create member components for sequence. 
for member_path in collection: member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': file_type + "name": collection.match(member_path).group("index"), + "container": container, + "size": member_sizes[member_path], + "file_type": file_type, } component = self._create_component( - 'FileComponent', member_path, member_data, location=None + "FileComponent", member_path, member_data, location=None ) - container['members'].append(component) + container["members"].append(component) if location: origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True + "Location", ftrack_api.symbol.ORIGIN_LOCATION_ID ) + location.add_component(container, origin_location, recursive=True) return container def _create_component(self, entity_type, path, data, location): - '''Create and return component. + """Create and return component. See public function :py:func:`createComponent` for argument details. - ''' + """ component = self.create(entity_type, data) # Add to special origin location so that it is possible to add to other # locations. - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) + origin_location = self.get("Location", ftrack_api.symbol.ORIGIN_LOCATION_ID) origin_location.add_component(component, path, recursive=False) if location: @@ -2122,7 +2029,7 @@ def _create_component(self, entity_type, path, data, location): return component def _get_filesystem_size(self, path): - '''Return size from *path*''' + """Return size from *path*""" try: size = os.path.getsize(path) except OSError: @@ -2131,20 +2038,18 @@ def _get_filesystem_size(self, path): return size def get_component_availability(self, component, locations=None): - '''Return availability of *component*. + """Return availability of *component*. If *locations* is set then limit result to availability of *component* in those *locations*. Return a dictionary of {location_id:percentage_availability} - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] + """ + return self.get_component_availabilities([component], locations=locations)[0] def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. + """Return availabilities of *components*. If *locations* is set then limit result to availabilities of *components* in those *locations*. @@ -2152,11 +2057,11 @@ def get_component_availabilities(self, components, locations=None): Return a list of dictionaries of {location_id:percentage_availability}. The list indexes correspond to those of *components*. - ''' + """ availabilities = [] if locations is None: - locations = self.query('Location') + locations = self.query("Location") # Separate components into two lists, those that are containers and # those that are not, so that queries can be optimised. @@ -2164,47 +2069,42 @@ def get_component_availabilities(self, components, locations=None): container_components = [] for component in components: - if 'members' in list(component.keys()): + if "members" in list(component.keys()): container_components.append(component) else: standard_components.append(component) # Perform queries. 
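For illustration, availability is reported per location id, with 100.0 meaning fully available; this continues the session sketch above:

    availability = session.get_component_availability(component)
    for location_id, percentage in availability.items():
        print(location_id, percentage)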
if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) + self.populate(standard_components, "component_locations.location_id") if container_components: self.populate( - container_components, - 'members, component_locations.location_id' + container_components, "members, component_locations.location_id" ) base_availability = {} for location in locations: - base_availability[location['id']] = 0.0 + base_availability[location["id"]] = 0.0 for component in components: availability = base_availability.copy() availabilities.append(availability) - is_container = 'members' in list(component.keys()) - if is_container and len(component['members']): + is_container = "members" in list(component.keys()) + if is_container and len(component["members"]): member_availabilities = self.get_component_availabilities( - component['members'], locations=locations + component["members"], locations=locations ) - multiplier = 1.0 / len(component['members']) + multiplier = 1.0 / len(component["members"]) for member, member_availability in zip( - component['members'], member_availabilities + component["members"], member_availabilities ): for location_id, ratio in list(member_availability.items()): - availability[location_id] += ( - ratio * multiplier - ) + availability[location_id] += ratio * multiplier else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] + for component_location in component["component_locations"]: + location_id = component_location["location_id"] if location_id in availability: availability[location_id] = 100.0 @@ -2218,7 +2118,7 @@ def get_component_availabilities(self, components, locations=None): return availabilities def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. + """Return an authenticated URL for widget with *name* and given options. The returned URL will be authenticated using a token which will expire after 6 minutes. @@ -2232,16 +2132,12 @@ def get_widget_url(self, name, entity=None, theme=None): *theme* sets the theme of the widget and can be either 'light' or 'dark' (defaulting to 'dark' if an invalid option given). - ''' - operation = { - 'action': 'get_widget_url', - 'name': name, - 'theme': theme - } + """ + operation = {"action": "get_widget_url", "name": name, "theme": theme} if entity: - operation['entity_type'] = entity.entity_type - operation['entity_key'] = ( - list(ftrack_api.inspection.primary_key(entity).values()) + operation["entity_type"] = entity.entity_type + operation["entity_key"] = list( + ftrack_api.inspection.primary_key(entity).values() ) try: @@ -2249,21 +2145,21 @@ def get_widget_url(self, name, entity=None, theme=None): except ftrack_api.exception.ServerError as error: # Raise informative error if the action is not supported. - if 'Invalid action u\'get_widget_url\'' in error.message: + if "Invalid action u'get_widget_url'" in error.message: raise ftrack_api.exception.ServerCompatibilityError( 'Server version {0!r} does not support "get_widget_url", ' - 'please update server and try again.'.format( - self.server_information.get('version') + "please update server and try again.".format( + self.server_information.get("version") ) ) else: raise else: - return result[0]['widget_url'] + return result[0]["widget_url"] - def encode_media(self, media, version_id=None, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. 
+ def encode_media(self, media, version_id=None, keep_original="auto"): + """Return a new Job that encodes *media* to make it playable in browsers. *media* can be a path to a file or a FileComponent in the ftrack.server location. @@ -2303,13 +2199,11 @@ def encode_media(self, media, version_id=None, keep_original='auto'): If *keep_original* is not set, the original media will be kept if it is a FileComponent, and deleted if it is a file path. You can specify True or False to change this behavior. - ''' + """ if isinstance(media, string_types): # Media is a path to a file. - server_location = self.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - if keep_original == 'auto': + server_location = self.get("Location", ftrack_api.symbol.SERVER_LOCATION_ID) + if keep_original == "auto": keep_original = False component_data = None @@ -2317,33 +2211,26 @@ def encode_media(self, media, version_id=None, keep_original='auto'): component_data = dict(version_id=version_id) component = self.create_component( - path=media, - data=component_data, - location=server_location + path=media, data=component_data, location=server_location ) # Auto commit to ensure component exists when sent to server. self.commit() - elif ( - hasattr(media, 'entity_type') and - media.entity_type in ('FileComponent',) - ): + elif hasattr(media, "entity_type") and media.entity_type in ("FileComponent",): # Existing file component. component = media - if keep_original == 'auto': + if keep_original == "auto": keep_original = True else: - raise ValueError( - 'Unable to encode media of type: {0}'.format(type(media)) - ) + raise ValueError("Unable to encode media of type: {0}".format(type(media))) operation = { - 'action': 'encode_media', - 'component_id': component['id'], - 'version_id': version_id, - 'keep_original': keep_original + "action": "encode_media", + "component_id": component["id"], + "version_id": version_id, + "keep_original": keep_original, } try: @@ -2351,22 +2238,20 @@ def encode_media(self, media, version_id=None, keep_original='auto'): except ftrack_api.exception.ServerError as error: # Raise informative error if the action is not supported. - if 'Invalid action u\'encode_media\'' in error.message: + if "Invalid action u'encode_media'" in error.message: raise ftrack_api.exception.ServerCompatibilityError( 'Server version {0!r} does not support "encode_media", ' - 'please update server and try again.'.format( - self.server_information.get('version') + "please update server and try again.".format( + self.server_information.get("version") ) ) else: raise - return self.get('Job', result[0]['job_id']) + return self.get("Job", result[0]["job_id"]) - def get_upload_metadata( - self, component_id, file_name, file_size, checksum=None - ): - '''Return URL and headers used to upload data for *component_id*. + def get_upload_metadata(self, component_id, file_name, file_size, checksum=None): + """Return URL and headers used to upload data for *component_id*. *file_name* and *file_size* should match the component's details. @@ -2377,13 +2262,13 @@ def get_upload_metadata( the base64-encoded 128-bit MD5 digest of the message (without the headers) according to RFC 1864. This can be used as a message integrity check to verify that the data is the same data that was originally sent.
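A short sketch of requesting a server-side encode; the path is hypothetical and polling the returned Job's "status" attribute is an assumption about typical usage:

    job = session.encode_media("/tmp/renders/preview.mov")
    print(job["status"])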
- ''' + """ operation = { - 'action': 'get_upload_metadata', - 'component_id': component_id, - 'file_name': file_name, - 'file_size': file_size, - 'checksum': checksum + "action": "get_upload_metadata", + "component_id": component_id, + "file_name": file_name, + "file_size": file_size, + "checksum": checksum, } try: @@ -2391,13 +2276,11 @@ def get_upload_metadata( except ftrack_api.exception.ServerError as error: # Raise informative error if the action is not supported. - if 'Invalid action u\'get_upload_metadata\'' in error.message: + if "Invalid action u'get_upload_metadata'" in error.message: raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' + "Server version {0!r} does not support " '"get_upload_metadata", please update server and try ' - 'again.'.format( - self.server_information.get('version') - ) + "again.".format(self.server_information.get("version")) ) else: raise @@ -2405,51 +2288,42 @@ def get_upload_metadata( return result[0] def send_user_invite(self, user): - '''Send a invitation to the provided *user*. + """Send a invitation to the provided *user*. *user* is a User instance - ''' + """ - self.send_user_invites( - [user] - ) + self.send_user_invites([user]) def send_user_invites(self, users): - '''Send a invitation to the provided *user*. + """Send a invitation to the provided *user*. *users* is a list of User instances - ''' + """ operations = [] for user in users: - operations.append( - { - 'action':'send_user_invite', - 'user_id': user['id'] - } - ) + operations.append({"action": "send_user_invite", "user_id": user["id"]}) try: self.call(operations) except ftrack_api.exception.ServerError as error: # Raise informative error if the action is not supported. - if 'Invalid action u\'send_user_invite\'' in error.message: + if "Invalid action u'send_user_invite'" in error.message: raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' + "Server version {0!r} does not support " '"send_user_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) + "try again.".format(self.server_information.get("version")) ) else: raise def send_review_session_invite(self, invitee): - '''Send an invite to a review session to *invitee*. + """Send an invite to a review session to *invitee*. *invitee* is a instance of ReviewSessionInvitee. @@ -2457,11 +2331,11 @@ def send_review_session_invite(self, invitee): The *invitee* must be committed. - ''' + """ self.send_review_session_invites([invitee]) def send_review_session_invites(self, invitees): - '''Send an invite to a review session to a list of *invitees*. + """Send an invite to a review session to a list of *invitees*. *invitee* is a list of ReviewSessionInvitee objects. @@ -2469,14 +2343,14 @@ def send_review_session_invites(self, invitees): All *invitees* must be committed. - ''' + """ operations = [] for invitee in invitees: operations.append( { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] + "action": "send_review_session_invite", + "review_session_invitee_id": invitee["id"], } ) @@ -2484,89 +2358,85 @@ def send_review_session_invites(self, invitees): self.call(operations) except ftrack_api.exception.ServerError as error: # Raise informative error if the action is not supported. 
- if 'Invalid action u\'send_review_session_invite\'' in error.message: + if "Invalid action u'send_review_session_invite'" in error.message: raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' + "Server version {0!r} does not support " '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) + "try again.".format(self.server_information.get("version")) ) else: raise class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' + """Context manager for temporary change of session auto_populate value.""" def __init__(self, session, auto_populate): - '''Initialise context.''' + """Initialise context.""" super(AutoPopulatingContext, self).__init__() self._session = session self._auto_populate = auto_populate self._current_auto_populate = None def __enter__(self): - '''Enter context switching to desired auto populate setting.''' + """Enter context switching to desired auto populate setting.""" self._current_auto_populate = self._session.auto_populate self._session.auto_populate = self._auto_populate def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' + """Exit context resetting auto populate to original setting.""" self._session.auto_populate = self._current_auto_populate class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' + """Context manager for temporary change of session record_operations.""" def __init__(self, session, record_operations): - '''Initialise context.''' + """Initialise context.""" super(OperationRecordingContext, self).__init__() self._session = session self._record_operations = record_operations self._current_record_operations = None def __enter__(self): - '''Enter context.''' + """Enter context.""" self._current_record_operations = self._session.record_operations self._session.record_operations = self._record_operations def __exit__(self, exception_type, exception_value, traceback): - '''Exit context.''' + """Exit context.""" self._session.record_operations = self._current_record_operations class OperationPayload(collections_abc.MutableMapping): - '''Represent operation payload.''' + """Represent operation payload.""" def __init__(self, *args, **kwargs): - '''Initialise payload.''' + """Initialise payload.""" super(OperationPayload, self).__init__() self._data = dict() self.update(dict(*args, **kwargs)) def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) + """Return string representation.""" + return "<{0} {1}>".format(self.__class__.__name__, str(self._data)) def __getitem__(self, key): - '''Return value for *key*.''' + """Return value for *key*.""" return self._data[key] def __setitem__(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" self._data[key] = value def __delitem__(self, key): - '''Remove *key*.''' + """Remove *key*.""" del self._data[key] def __iter__(self): - '''Iterate over all keys.''' + """Iterate over all keys.""" return iter(self._data) def __len__(self): - '''Return count of keys.''' + """Return count of keys.""" return len(self._data) diff --git a/source/ftrack_api/structure/base.py b/source/ftrack_api/structure/base.py index de3335f9..bf4a4a4f 100644 --- a/source/ftrack_api/structure/base.py +++ b/source/ftrack_api/structure/base.py @@ -7,32 
+7,32 @@ class Structure(with_metaclass(ABCMeta, object)): - '''Structure plugin interface. + """Structure plugin interface. A structure plugin should compute appropriate paths for data. - ''' + """ - def __init__(self, prefix=''): - '''Initialise structure.''' + def __init__(self, prefix=""): + """Initialise structure.""" self.prefix = prefix - self.path_separator = '/' + self.path_separator = "/" super(Structure, self).__init__() @abstractmethod def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. + """Return a resource identifier for supplied *entity*. *context* can be a mapping that supplies additional information. - ''' + """ def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] + """Return a sequence expression for *sequence* component.""" + padding = sequence["padding"] if padding: - expression = '%0{0}d'.format(padding) + expression = "%0{0}d".format(padding) else: - expression = '%d' + expression = "%d" return expression diff --git a/source/ftrack_api/structure/entity_id.py b/source/ftrack_api/structure/entity_id.py index ae466bf6..21e9a740 100644 --- a/source/ftrack_api/structure/entity_id.py +++ b/source/ftrack_api/structure/entity_id.py @@ -5,8 +5,8 @@ class EntityIdStructure(ftrack_api.structure.base.Structure): - '''Entity id pass-through structure.''' + """Entity id pass-through structure.""" def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] + """Return a *resourceIdentifier* for supplied *entity*.""" + return entity["id"] diff --git a/source/ftrack_api/structure/id.py b/source/ftrack_api/structure/id.py index acc3e21b..e01af390 100644 --- a/source/ftrack_api/structure/id.py +++ b/source/ftrack_api/structure/id.py @@ -8,7 +8,7 @@ class IdStructure(ftrack_api.structure.base.Structure): - '''Id based structure supporting Components only. + """Id based structure supporting Components only. A component's unique id will be used to form a path to store the data at. To avoid millions of entries in one directory each id is chunked into four @@ -32,60 +32,57 @@ class IdStructure(ftrack_api.structure.base.Structure): /prefix/1/2/3/4/56789/file.0001.exr /prefix/1/2/3/4/56789/file.0002.exr - ''' + """ def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. + """Return a resource identifier for supplied *entity*. *context* can be a mapping that supplies additional information. - ''' - if entity.entity_type in ('FileComponent',): + """ + if entity.entity_type in ("FileComponent",): # When in a container, place the file inside a directory named # after the container. - container = entity['container'] + container = entity["container"] if container and container is not ftrack_api.symbol.NOT_SET: path = self.get_resource_identifier(container) - if container.entity_type in ('SequenceComponent',): + if container.entity_type in ("SequenceComponent",): # Label doubles as index for now.
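To make the chunked-id scheme in the docstring above concrete, a sketch; the component variable is assumed from the earlier session sketch:

    import ftrack_api.structure.id

    structure = ftrack_api.structure.id.IdStructure(prefix="prefix")
    # For a FileComponent with no container and id "0123456789...", this
    # yields "prefix/0/1/2/3/456789...<file_type>".
    print(structure.get_resource_identifier(component))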
- name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) + name = "file.{0}{1}".format(entity["name"], entity["file_type"]) parts = [os.path.dirname(path), name] else: # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] + name = entity["id"] + entity["file_type"] parts = [path, name] else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) + name = entity["id"][4:] + entity["file_type"] + parts = [self.prefix] + list(entity["id"][:4]) + [name] - elif entity.entity_type in ('SequenceComponent',): - name = 'file' + elif entity.entity_type in ("SequenceComponent",): + name = "file" # Add a sequence identifier. sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) + name += ".{0}".format(sequence_expression) if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api.symbol.NOT_SET + entity["file_type"] + and entity["file_type"] is not ftrack_api.symbol.NOT_SET ): - name += entity['file_type'] + name += entity["file_type"] - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) + parts = [self.prefix] + list(entity["id"][:4]) + [entity["id"][4:]] + [name] - elif entity.entity_type in ('ContainerComponent',): + elif entity.entity_type in ("ContainerComponent",): # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) + parts = [self.prefix] + list(entity["id"][:4]) + [entity["id"][4:]] else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) + raise NotImplementedError( + "Cannot generate path for unsupported " "entity {0}".format(entity) + ) - return self.path_separator.join(parts).strip('/') + return self.path_separator.join(parts).strip("/") diff --git a/source/ftrack_api/structure/origin.py b/source/ftrack_api/structure/origin.py index 0d4d3a57..c381915d 100644 --- a/source/ftrack_api/structure/origin.py +++ b/source/ftrack_api/structure/origin.py @@ -5,24 +5,24 @@ class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' + """Origin structure that passes through existing resource identifier.""" def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. + """Return a resource identifier for supplied *entity*. *context* should be a mapping that includes at least a 'source_resource_identifier' key that refers to the resource identifier to pass through. - ''' + """ if context is None: context = {} - resource_identifier = context.get('source_resource_identifier') + resource_identifier = context.get("source_resource_identifier") if resource_identifier is None: raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' + "Could not generate resource identifier as no source resource " + "identifier found in passed context." ) return resource_identifier diff --git a/source/ftrack_api/structure/standard.py b/source/ftrack_api/structure/standard.py index f1833f3f..388e1a10 100644 --- a/source/ftrack_api/structure/standard.py +++ b/source/ftrack_api/structure/standard.py @@ -11,7 +11,7 @@ class StandardStructure(ftrack_api.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. + """Project hierarchy based structure that only supports Components. 
The resource identifier is generated from the project code, the name of objects in the project structure, asset name and version number:: @@ -43,12 +43,10 @@ class StandardStructure(ftrack_api.structure.base.Structure): my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - ''' + """ - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. + def __init__(self, project_versions_prefix=None, illegal_character_substitute="_"): + """Initialise structure. If *project_versions_prefix* is defined, insert after the project code for versions published directly under the project:: @@ -62,51 +60,43 @@ def __init__( Nested component containers/sequences are not supported. - ''' + """ super(StandardStructure, self).__init__() self.project_versions_prefix = project_versions_prefix self.illegal_character_substitute = illegal_character_substitute def _get_parts(self, entity): - '''Return resource identifier parts from *entity*.''' + """Return resource identifier parts from *entity*.""" session = entity.session - version = entity['version'] + version = entity["version"] - if version is ftrack_api.symbol.NOT_SET and entity['version_id']: - version = session.get('AssetVersion', entity['version_id']) + if version is ftrack_api.symbol.NOT_SET and entity["version_id"]: + version = session.get("AssetVersion", entity["version_id"]) error_message = ( - 'Component {0!r} must be attached to a committed ' - 'version and a committed asset with a parent context.'.format( - entity - ) + "Component {0!r} must be attached to a committed " + "version and a committed asset with a parent context.".format(entity) ) - if ( - version is ftrack_api.symbol.NOT_SET or - version in session.created - ): + if version is ftrack_api.symbol.NOT_SET or version in session.created: raise ftrack_api.exception.StructureError(error_message) - link = version['link'] + link = version["link"] if not link: raise ftrack_api.exception.StructureError(error_message) - structure_names = [ - item['name'] - for item in link[1:-1] - ] + structure_names = [item["name"] for item in link[1:-1]] - project_id = link[0]['id'] - project = session.get('Project', project_id) - asset = version['asset'] + project_id = link[0]["id"] + project = session.get("Project", project_id) + asset = version["asset"] - version_number = self._format_version(version['version']) + version_number = self._format_version(version["version"]) parts = [] - parts.append(project['name']) + parts.append(project["name"]) if structure_names: parts.extend(structure_names) @@ -115,17 +105,17 @@ def _get_parts(self, entity): # published directly under the project. parts.append(self.project_versions_prefix) - parts.append(asset['name']) + parts.append(asset["name"]) parts.append(version_number) return [self.sanitise_for_filesystem(part) for part in parts] def _format_version(self, number): - '''Return a formatted string representing version *number*.''' - return 'v{0:03d}'.format(number) + """Return a formatted string representing version *number*.""" + return "v{0:03d}".format(number) def sanitise_for_filesystem(self, value): - '''Return *value* with illegal filesystem characters replaced. + """Return *value* with illegal filesystem characters replaced. An illegal character is one that is not typically valid for filesystem usage, such as non ascii characters, or can be awkward to use in a @@ -134,16 +124,18 @@ def sanitise_for_filesystem(self, value): initialisation. 
If no character was specified as substitute then return *value* unmodified. - ''' + """ if self.illegal_character_substitute is None: return value - value = unicodedata.normalize('NFKD', str(value)).encode('ascii', 'ignore') - value = re.sub('[^\w\.-]', self.illegal_character_substitute, value.decode('utf-8')) + value = unicodedata.normalize("NFKD", str(value)).encode("ascii", "ignore") + value = re.sub( + "[^\w\.-]", self.illegal_character_substitute, value.decode("utf-8") + ) return str(value.strip().lower()) def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. + """Return a resource identifier for supplied *entity*. *context* can be a mapping that supplies additional information, but is unused in this implementation. @@ -153,63 +145,61 @@ def get_resource_identifier(self, entity, context=None): attached to a committed version and a committed asset with a parent context. - ''' - if entity.entity_type in ('FileComponent',): - container = entity['container'] + """ + if entity.entity_type in ("FileComponent",): + container = entity["container"] if container: # Get resource identifier for container. container_path = self.get_resource_identifier(container) - if container.entity_type in ('SequenceComponent',): + if container.entity_type in ("SequenceComponent",): # Strip the sequence component expression from the parent # container and back the correct filename, i.e. # /sequence/component/sequence_component_name.0012.exr. - name = '{0}.{1}{2}'.format( - container['name'], entity['name'], entity['file_type'] + name = "{0}.{1}{2}".format( + container["name"], entity["name"], entity["file_type"] ) parts = [ os.path.dirname(container_path), - self.sanitise_for_filesystem(name) + self.sanitise_for_filesystem(name), ] else: # Container is not a sequence component so add it as a # normal component inside the container. - name = entity['name'] + entity['file_type'] - parts = [ - container_path, self.sanitise_for_filesystem(name) - ] + name = entity["name"] + entity["file_type"] + parts = [container_path, self.sanitise_for_filesystem(name)] else: # File component does not have a container, construct name from # component name and file type. parts = self._get_parts(entity) - name = entity['name'] + entity['file_type'] + name = entity["name"] + entity["file_type"] parts.append(self.sanitise_for_filesystem(name)) - elif entity.entity_type in ('SequenceComponent',): + elif entity.entity_type in ("SequenceComponent",): # Create sequence expression for the sequence component and add it # to the parts. parts = self._get_parts(entity) sequence_expression = self._get_sequence_expression(entity) parts.append( - '{0}.{1}{2}'.format( - self.sanitise_for_filesystem(entity['name']), + "{0}.{1}{2}".format( + self.sanitise_for_filesystem(entity["name"]), sequence_expression, - self.sanitise_for_filesystem(entity['file_type']) + self.sanitise_for_filesystem(entity["file_type"]), ) ) - elif entity.entity_type in ('ContainerComponent',): + elif entity.entity_type in ("ContainerComponent",): # Add the name of the container to the resource identifier parts. 
parts = self._get_parts(entity) - parts.append(self.sanitise_for_filesystem(entity['name'])) + parts.append(self.sanitise_for_filesystem(entity["name"])) else: raise NotImplementedError( - 'Cannot generate resource identifier for unsupported ' - 'entity {0!r}'.format(entity) + "Cannot generate resource identifier for unsupported " + "entity {0!r}".format(entity) ) return self.path_separator.join(parts) diff --git a/source/ftrack_api/symbol.py b/source/ftrack_api/symbol.py index 4906c4e7..79c9b012 100644 --- a/source/ftrack_api/symbol.py +++ b/source/ftrack_api/symbol.py @@ -5,74 +5,76 @@ from builtins import object + + class Symbol(object): - '''A constant symbol.''' + """A constant symbol.""" def __init__(self, name, value=True): - '''Initialise symbol with unique *name* and *value*. + """Initialise symbol with unique *name* and *value*. *value* is used for nonzero testing. - ''' + """ self.name = name self.value = value def __str__(self): - '''Return string representation.''' + """Return string representation.""" return self.name def __repr__(self): - '''Return representation.''' - return '{0}({1})'.format(self.__class__.__name__, self.name) + """Return representation.""" + return "{0}({1})".format(self.__class__.__name__, self.name) def __bool__(self): - '''Return whether symbol represents non-zero value.''' + """Return whether symbol represents non-zero value.""" return bool(self.value) def __copy__(self): - '''Return shallow copy. + """Return shallow copy. Overridden to always return same instance. - ''' + """ return self #: Symbol representing that no value has been set or loaded. -NOT_SET = Symbol('NOT_SET', False) +NOT_SET = Symbol("NOT_SET", False) #: Symbol representing created state. -CREATED = Symbol('CREATED') +CREATED = Symbol("CREATED") #: Symbol representing modified state. -MODIFIED = Symbol('MODIFIED') +MODIFIED = Symbol("MODIFIED") #: Symbol representing deleted state. -DELETED = Symbol('DELETED') +DELETED = Symbol("DELETED") #: Topic published when component added to a location. -COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' +COMPONENT_ADDED_TO_LOCATION_TOPIC = "ftrack.location.component-added" #: Topic published when component removed from a location. -COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' +COMPONENT_REMOVED_FROM_LOCATION_TOPIC = "ftrack.location.component-removed" #: Identifier of builtin origin location. -ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' +ORIGIN_LOCATION_ID = "ce9b348f-8809-11e3-821c-20c9d081909b" #: Identifier of builtin unmanaged location. -UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' +UNMANAGED_LOCATION_ID = "cb268ecc-8809-11e3-a7e2-20c9d081909b" #: Identifier of builtin review location. -REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' +REVIEW_LOCATION_ID = "cd41be70-8809-11e3-b98a-20c9d081909b" #: Identifier of builtin connect location. -CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' +CONNECT_LOCATION_ID = "07b82a97-8cf9-11e3-9383-20c9d081909b" #: Identifier of builtin server location. -SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' +SERVER_LOCATION_ID = "3a372bde-05bc-11e4-8908-20c9d081909b" #: Chunk size used when working with data, default to 1Mb. 
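A small sketch of the Symbol semantics defined above:

    import copy

    import ftrack_api.symbol

    # NOT_SET is constructed with value=False, so it is falsy.
    assert not ftrack_api.symbol.NOT_SET
    assert ftrack_api.symbol.CREATED

    # __copy__ returns the same instance, keeping identity checks valid.
    assert copy.copy(ftrack_api.symbol.NOT_SET) is ftrack_api.symbol.NOT_SET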
-CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 +CHUNK_SIZE = int(os.getenv("FTRACK_API_FILE_CHUNK_SIZE", 0)) or 1024 * 1024 #: Symbol representing syncing users with ldap -JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') +JOB_SYNC_USERS_LDAP = Symbol("SYNC_USERS_LDAP") diff --git a/test/fixture/plugin/configure_locations.py b/test/fixture/plugin/configure_locations.py index 5fcf034b..1e106e77 100644 --- a/test/fixture/plugin/configure_locations.py +++ b/test/fixture/plugin/configure_locations.py @@ -9,32 +9,29 @@ def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] + """Configure locations for session.""" + session = event["data"]["session"] # Find location(s) and customise instances. - location = session.ensure('Location', {'name': 'test.location'}) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location = session.ensure("Location", {"name": "test.location"}) + ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="") def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') + """Register plugin with *session*.""" + logger = logging.getLogger("ftrack_plugin:configure_locations.register") # Validate that session is an instance of ftrack_api.Session. If not, assume # that register is being called from an old or incompatible API and return # without doing anything. if not isinstance(session, ftrack_api.Session): logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0} is not an " + "ftrack_api.Session instance.".format(session) ) return session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations + "topic=ftrack.api.session.configure-location", configure_locations ) diff --git a/test/fixture/plugin/construct_entity_type.py b/test/fixture/plugin/construct_entity_type.py index bb2f8c42..c086cb43 100644 --- a/test/fixture/plugin/construct_entity_type.py +++ b/test/fixture/plugin/construct_entity_type.py @@ -7,46 +7,45 @@ def stub(self): - '''A stub method for testing only.''' + """A stub method for testing only.""" class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' + """Entity class factory.""" def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' + """Create and return entity class from *schema*.""" # Optionally change bases for class to be generated. cls = super(Factory, self).create(schema, bases=bases) # Further customise cls before returning. - if schema['id'] == 'User': + if schema["id"] == "User": cls.stub = stub return cls def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') + """Register plugin with *session*.""" + logger = logging.getLogger("ftrack_plugin:construct_entity_type.register") # Validate that session is an instance of ftrack_api.Session. If not, assume # that register is being called from an old or incompatible API and return # without doing anything. 
if not isinstance(session, ftrack_api.Session): logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0!r} is not an " + "ftrack_api.Session instance.".format(session) ) return factory = Factory() def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] + """Return class to represent entity type specified by *event*.""" + schema = event["data"]["schema"] return factory.create(schema) session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type + "topic=ftrack.api.session.construct-entity-type", construct_entity_type ) diff --git a/test/fixture/plugin/count_session_event.py b/test/fixture/plugin/count_session_event.py index 37938ae8..a8275edc 100644 --- a/test/fixture/plugin/count_session_event.py +++ b/test/fixture/plugin/count_session_event.py @@ -7,35 +7,29 @@ def count_session_event(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.ftrack.test-session-event-plugin') - event_topic = event['topic'] - logger.debug(u'Event received: {}'.format(event_topic)) - session = event['data']['session'] + """Called when session is ready to be used.""" + logger = logging.getLogger("com.ftrack.test-session-event-plugin") + event_topic = event["topic"] + logger.debug("Event received: {}".format(event_topic)) + session = event["data"]["session"] session._test_called_events[event_topic] += 1 def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.ftrack.test-session-event-plugin') + """Register plugin. Called when used as a plugin.""" + logger = logging.getLogger("com.ftrack.test-session-event-plugin") # Validate that session is an instance of ftrack_api.Session. If not, # assume that register is being called from an old or incompatible API and # return without doing anything. if not isinstance(session, ftrack_api.session.Session): logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0!r} is not an " + "ftrack_api.Session instance.".format(session) ) return session._test_called_events = collections.defaultdict(int) - session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - count_session_event - ) - session.event_hub.subscribe( - 'topic=ftrack.api.session.reset', - count_session_event - ) - logger.debug('Plugin registered') + session.event_hub.subscribe("topic=ftrack.api.session.ready", count_session_event) + session.event_hub.subscribe("topic=ftrack.api.session.reset", count_session_event) + logger.debug("Plugin registered") diff --git a/test/fixture/plugin/get_file_type.py b/test/fixture/plugin/get_file_type.py index e6a4d8b4..fd8cf8f8 100644 --- a/test/fixture/plugin/get_file_type.py +++ b/test/fixture/plugin/get_file_type.py @@ -8,27 +8,27 @@ def get_file_type(event): - '''return extension from the provided **file_path**.''' - path = event['data']['file_path'] + """Return extension from the provided **file_path**.""" + path = event["data"]["file_path"] - # Get Filename. + # Get filename. filename = os.path.basename(path) # If no extension is found, return None to fall back on default session code. - if '.' not in filename: + if "." not in filename: return None # Identify sequence tokens by searching for: %0Nd, ####, %d, or a minimum of two numeral digits.
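For reference, the event this plugin answers can be published directly, mirroring retrieve_file_type earlier in this diff; the sequence-style path and the session are assumptions:

    import ftrack_api.event.base

    response = session.event_hub.publish(
        ftrack_api.event.base.Event(
            topic="ftrack.api.session.get-file-type-from-string",
            data=dict(file_path="plate.%04d.exr"),
        ),
        synchronous=True,
    )
    file_type = next((result for result in response if result), None)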
- sequence_finder_regexp = re.compile(r'((%+\d+d)|(#+)|(%d)|(\d{2,}))') + sequence_finder_regexp = re.compile(r"((%+\d+d)|(#+)|(%d)|(\d{2,}))") # Result extension container. results = [] # Split by dot and get the last three occurrences. - tokens = filename.split('.')[-3:] + tokens = filename.split(".")[-3:] # Limit tokens to be taken into account based on the number of them. - split = len(tokens) - 1 + split = len(tokens) - 1 tokens = tokens[-split:] for token in tokens: @@ -36,29 +36,28 @@ def get_file_type(event): sequence_match = sequence_finder_regexp.match(token) if not sequence_match: - # If is not a sequence identifier, + # If it is not a sequence identifier, # make it part of the extension. results.append(token) # Return composed extension. - return '.{}'.format('.'.join(results)) + return ".{}".format(".".join(results)) def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin.get_file_type.register') + """Register plugin with *session*.""" + logger = logging.getLogger("ftrack_plugin.get_file_type.register") # Validate that session is an instance of ftrack_api.Session. If not, assume # that register is being called from an old or incompatible API and return # without doing anything. if not isinstance(session, ftrack_api.Session): logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) + "Not subscribing plugin as passed argument {0} is not an " + "ftrack_api.Session instance.".format(session) ) return session.event_hub.subscribe( - 'topic=ftrack.api.session.get-file-type-from-string', - get_file_type + "topic=ftrack.api.session.get-file-type-from-string", get_file_type ) diff --git a/test/unit/accessor/test_disk.py b/test/unit/accessor/test_disk.py index 82895d92..d18d16b9 100644 --- a/test/unit/accessor/test_disk.py +++ b/test/unit/accessor/test_disk.py @@ -13,73 +13,71 @@ def test_get_filesystem_path(temporary_path): - '''Convert paths to filesystem paths.''' + """Convert paths to filesystem paths.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) # Absolute paths outside of configured prefix fail. with pytest.raises(ftrack_api.exception.AccessorFilesystemPathError): - accessor.get_filesystem_path(os.path.join('/', 'test', 'foo.txt')) + accessor.get_filesystem_path(os.path.join("/", "test", "foo.txt")) # Absolute root path. assert accessor.get_filesystem_path(temporary_path) == temporary_path # Absolute path within prefix.
- assert ( - accessor.get_filesystem_path( - os.path.join(temporary_path, 'test.txt') - ) == - os.path.join(temporary_path, 'test.txt') - ) + assert accessor.get_filesystem_path( + os.path.join(temporary_path, "test.txt") + ) == os.path.join(temporary_path, "test.txt") # Relative root path - assert accessor.get_filesystem_path('') == temporary_path + assert accessor.get_filesystem_path("") == temporary_path # Relative path for file at root - assert (accessor.get_filesystem_path('test.txt') == - os.path.join(temporary_path, 'test.txt')) + assert accessor.get_filesystem_path("test.txt") == os.path.join( + temporary_path, "test.txt" + ) # Relative path for file in subdirectory - assert (accessor.get_filesystem_path('test/foo.txt') == - os.path.join(temporary_path, 'test', 'foo.txt')) + assert accessor.get_filesystem_path("test/foo.txt") == os.path.join( + temporary_path, "test", "foo.txt" + ) # Relative path non-collapsed - assert (accessor.get_filesystem_path('test/../foo.txt') == - os.path.join(temporary_path, 'foo.txt')) + assert accessor.get_filesystem_path("test/../foo.txt") == os.path.join( + temporary_path, "foo.txt" + ) # Relative directory path without trailing slash - assert (accessor.get_filesystem_path('test') == - os.path.join(temporary_path, 'test')) + assert accessor.get_filesystem_path("test") == os.path.join(temporary_path, "test") # Relative directory path with trailing slash - assert (accessor.get_filesystem_path('test/') == - os.path.join(temporary_path, 'test')) + assert accessor.get_filesystem_path("test/") == os.path.join(temporary_path, "test") def test_list(temporary_path): - '''List entries.''' + """List entries.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) # File in root directory - assert accessor.list('') == [] - data = accessor.open('test.txt', 'w+') + assert accessor.list("") == [] + data = accessor.open("test.txt", "w+") data.close() - assert accessor.list('') == ['test.txt'] + assert accessor.list("") == ["test.txt"] # File in subdirectory - accessor.make_container('test_dir') - assert accessor.list('test_dir') == [] - data = accessor.open('test_dir/test.txt', 'w+') + accessor.make_container("test_dir") + assert accessor.list("test_dir") == [] + data = accessor.open("test_dir/test.txt", "w+") data.close() - listing = accessor.list('test_dir') - assert listing == [os.path.join('test_dir', 'test.txt')] + listing = accessor.list("test_dir") + assert listing == [os.path.join("test_dir", "test.txt")] # Is a valid resource assert accessor.exists(listing[0]) is True def test_exists(temporary_path): - '''Valid path exists.''' + """Valid path exists.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) _, temporary_file = tempfile.mkstemp(dir=temporary_path) @@ -87,13 +85,13 @@ def test_exists(temporary_path): def test_missing_does_not_exist(temporary_path): - '''Missing path does not exist.''' + """Missing path does not exist.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.exists('non-existant.txt') is False + assert accessor.exists("non-existant.txt") is False def test_is_file(temporary_path): - '''Valid file is considered a file.''' + """Valid file is considered a file.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) _, temporary_file = tempfile.mkstemp(dir=temporary_path) @@ -101,13 +99,13 @@ def test_is_file(temporary_path): def test_missing_is_not_file(temporary_path): - '''Missing path is not considered a file.''' + """Missing path is not considered a file.""" 
accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.is_file('non_existant.txt') is False + assert accessor.is_file("non_existant.txt") is False def test_container_is_not_file(temporary_path): - '''Valid container is not considered a file.''' + """Valid container is not considered a file.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) temporary_directory = tempfile.mkdtemp(dir=temporary_path) @@ -115,7 +113,7 @@ def test_container_is_not_file(temporary_path): def test_is_container(temporary_path): - '''Valid container is considered a container.''' + """Valid container is considered a container.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) temporary_directory = tempfile.mkdtemp(dir=temporary_path) @@ -123,13 +121,13 @@ def test_is_container(temporary_path): def test_missing_is_not_container(temporary_path): - '''Missing path is not considered a container.''' + """Missing path is not considered a container.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.is_container('non_existant') is False + assert accessor.is_container("non_existant") is False def test_file_is_not_container(temporary_path): - '''Valid file is not considered a container.''' + """Valid file is not considered a container.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) _, temporary_file = tempfile.mkstemp(dir=temporary_path) @@ -137,35 +135,33 @@ def test_file_is_not_container(temporary_path): def test_is_sequence(temporary_path): - '''Sequence detection unsupported.''' + """Sequence detection unsupported.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - with pytest.raises( - ftrack_api.exception.AccessorUnsupportedOperationError - ): - accessor.is_sequence('foo.%04d.exr') + with pytest.raises(ftrack_api.exception.AccessorUnsupportedOperationError): + accessor.is_sequence("foo.%04d.exr") def test_open(temporary_path): - '''Open file.''' + """Open file.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) with pytest.raises(ftrack_api.exception.AccessorResourceNotFoundError): - accessor.open('test.txt', 'r') + accessor.open("test.txt", "r") - data = accessor.open('test.txt', 'w+') + data = accessor.open("test.txt", "w+") assert isinstance(data, ftrack_api.data.Data) is True - assert data.read() == '' - data.write('test data') + assert data.read() == "" + data.write("test data") data.close() - data = accessor.open('test.txt', 'r') - assert (data.read() == 'test data') + data = accessor.open("test.txt", "r") + assert data.read() == "test data" data.close() def test_remove_file(temporary_path): - '''Delete file at path.''' + """Delete file at path.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) file_handle, temporary_file = tempfile.mkstemp(dir=temporary_path) @@ -175,7 +171,7 @@ def test_remove_file(temporary_path): def test_remove_container(temporary_path): - '''Delete container at path.''' + """Delete container at path.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) temporary_directory = tempfile.mkdtemp(dir=temporary_path) @@ -184,84 +180,59 @@ def test_remove_container(temporary_path): def test_remove_missing(temporary_path): - '''Fail to remove path that does not exist.''' + """Fail to remove path that does not exist.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) with pytest.raises(ftrack_api.exception.AccessorResourceNotFoundError): - accessor.remove('non_existant') + 
accessor.remove("non_existant") def test_make_container(temporary_path): - '''Create container.''' + """Create container.""" accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - accessor.make_container('test') - assert os.path.isdir(os.path.join(temporary_path, 'test')) is True + accessor.make_container("test") + assert os.path.isdir(os.path.join(temporary_path, "test")) is True # Recursive - accessor.make_container('test/a/b/c') - assert ( - os.path.isdir( - os.path.join(temporary_path, 'test', 'a', 'b', 'c') - ) is - True - ) + accessor.make_container("test/a/b/c") + assert os.path.isdir(os.path.join(temporary_path, "test", "a", "b", "c")) is True # Non-recursive fail - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): - accessor.make_container('test/d/e/f', recursive=False) + with pytest.raises(ftrack_api.exception.AccessorParentResourceNotFoundError): + accessor.make_container("test/d/e/f", recursive=False) # Existing succeeds - accessor.make_container('test/a/b/c') + accessor.make_container("test/a/b/c") def test_get_container(temporary_path): - '''Get container from resource_identifier.''' + """Get container from resource_identifier.""" # With prefix. accessor = ftrack_api.accessor.disk.DiskAccessor(prefix=temporary_path) - assert ( - accessor.get_container(os.path.join('test', 'a')) == - 'test' - ) + assert accessor.get_container(os.path.join("test", "a")) == "test" - assert ( - accessor.get_container(os.path.join('test', 'a/')) == - 'test' - ) + assert accessor.get_container(os.path.join("test", "a/")) == "test" - assert ( - accessor.get_container('test') == - '' - ) + assert accessor.get_container("test") == "" - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): - accessor.get_container('') + with pytest.raises(ftrack_api.exception.AccessorParentResourceNotFoundError): + accessor.get_container("") - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): + with pytest.raises(ftrack_api.exception.AccessorParentResourceNotFoundError): accessor.get_container(temporary_path) # Without prefix. 
-    accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='')
+    accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="")

-    assert (
-        accessor.get_container(os.path.join(temporary_path, 'test', 'a')) ==
-        os.path.join(temporary_path, 'test')
-    )
+    assert accessor.get_container(
+        os.path.join(temporary_path, "test", "a")
+    ) == os.path.join(temporary_path, "test")

-    assert (
-        accessor.get_container(
-            os.path.join(temporary_path, 'test', 'a/')
-        ) ==
-        os.path.join(temporary_path, 'test')
-    )
+    assert accessor.get_container(
+        os.path.join(temporary_path, "test", "a/")
+    ) == os.path.join(temporary_path, "test")

     assert (
-        accessor.get_container(os.path.join(temporary_path, 'test')) ==
-        temporary_path
+        accessor.get_container(os.path.join(temporary_path, "test")) == temporary_path
     )
diff --git a/test/unit/accessor/test_server.py b/test/unit/accessor/test_server.py
index b9c00fab..bf366c29 100644
--- a/test/unit/accessor/test_server.py
+++ b/test/unit/accessor/test_server.py
@@ -12,30 +12,30 @@


 def test_read_and_write(new_component, session):
-    '''Read and write data from server accessor.'''
+    """Read and write data from server accessor."""
     random_data = uuid.uuid1().hex.encode()

     accessor = ftrack_api.accessor.server._ServerAccessor(session)
-    http_file = accessor.open(new_component['id'], mode='wb')
+    http_file = accessor.open(new_component["id"], mode="wb")
     http_file.write(random_data)
     http_file.close()

-    data = accessor.open(new_component['id'], 'r')
-    assert data.read() == random_data, 'Read data is the same as written.'
+    data = accessor.open(new_component["id"], "r")
+    assert data.read() == random_data, "Read data is the same as written."
     data.close()


 def test_remove_data(new_component, session):
-    '''Remove data using server accessor.'''
+    """Remove data using server accessor."""
     random_data = uuid.uuid1().hex

     accessor = ftrack_api.accessor.server._ServerAccessor(session)
-    http_file = accessor.open(new_component['id'], mode='wb')
+    http_file = accessor.open(new_component["id"], mode="wb")
     http_file.write(random_data)
     http_file.close()

-    accessor.remove(new_component['id'])
+    accessor.remove(new_component["id"])

-    data = accessor.open(new_component['id'], 'r')
+    data = accessor.open(new_component["id"], "r")
     with pytest.raises(ftrack_api.exception.AccessorOperationFailedError):
         data.read()
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index e2130bec..cb3cdecf 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -14,7 +14,7 @@


 def pytest_generate_tests(metafunc):
-    '''Parametrize tests dynamically.
+    """Parametrize tests dynamically.

     If a test function has a corresponding parametrize function then call it
     passing along the *metafunc*. For example, for a "test_foo" function, look
@@ -23,20 +23,20 @@ def pytest_generate_tests(metafunc):
     This is useful when more complex dynamic parametrization is needed than
     the standard pytest.mark.parametrize decorator can provide.
-    '''
-    generator_name = 'parametrize_{}'.format(metafunc.function.__name__)
+    """
+    generator_name = "parametrize_{}".format(metafunc.function.__name__)
     generator = getattr(metafunc.module, generator_name, None)
     if callable(generator):
         generator(metafunc)


 def _temporary_file(request, **kwargs):
-    '''Return temporary file.'''
+    """Return temporary file."""
     file_handle, path = tempfile.mkstemp(**kwargs)
     os.close(file_handle)

     def cleanup():
-        '''Remove temporary file.'''
+        """Remove temporary file."""
         try:
             os.remove(path)
         except OSError:
@@ -48,23 +48,23 @@ def cleanup():

 @pytest.fixture()
 def temporary_file(request):
-    '''Return temporary file.'''
+    """Return temporary file."""
     return _temporary_file(request)


 @pytest.fixture()
 def temporary_image(request):
-    '''Return temporary file.'''
-    return _temporary_file(request, suffix='.jpg')
+    """Return temporary file."""
+    return _temporary_file(request, suffix=".jpg")


 @pytest.fixture()
 def temporary_directory(request):
-    '''Return temporary directory.'''
+    """Return temporary directory."""
     path = tempfile.mkdtemp()

     def cleanup():
-        '''Remove temporary directory.'''
+        """Remove temporary directory."""
         shutil.rmtree(path)

     request.addfinalizer(cleanup)
@@ -74,20 +74,18 @@ def cleanup():

 @pytest.fixture()
 def temporary_sequence(temporary_directory):
-    '''Return temporary sequence of three files.
+    """Return temporary sequence of three files.

     Return the path using the `clique `_ format, for example::

         /tmp/asfjsfjoj3/%04d.jpg [1-3]

-    '''
+    """
     items = []
     for index in range(3):
-        item_path = os.path.join(
-            temporary_directory, '{0:04d}.jpg'.format(index)
-        )
-        with open(item_path, 'w') as file_descriptor:
+        item_path = os.path.join(temporary_directory, "{0:04d}.jpg".format(index))
+        with open(item_path, "w") as file_descriptor:
             file_descriptor.write(uuid.uuid4().hex)
             file_descriptor.close()
@@ -101,14 +99,10 @@ def temporary_sequence(temporary_directory):

 @pytest.fixture()
 def video_path():
-    '''Return a path to a video file.'''
+    """Return a path to a video file."""
     video = os.path.abspath(
         os.path.join(
-            os.path.dirname(__file__),
-            '..',
-            'fixture',
-            'media',
-            'colour_wheel.mov'
+            os.path.dirname(__file__), "..", "fixture", "media", "colour_wheel.mov"
         )
     )
@@ -117,11 +111,9 @@

 @pytest.fixture()
 def session(request):
-    '''Return session instance.'''
+    """Return session instance."""
     session = ftrack_api.Session(
-        schema_cache_path=tempfile.mkdtemp(
-            suffix='ftrack_cache'
-        )
+        schema_cache_path=tempfile.mkdtemp(suffix="ftrack_cache")
     )

     def cleanup():
@@ -130,27 +122,27 @@ def cleanup():
     request.addfinalizer(cleanup)

     return session
-    
+

 @pytest.fixture()
 def session_no_autoconnect_hub():
-    '''Return session instance not auto connected to hub.'''
+    """Return session instance not auto connected to hub."""
     return ftrack_api.Session(auto_connect_event_hub=False)


 @pytest.fixture()
 def unique_name():
-    '''Return a unique name.'''
-    return 'test-{0}'.format(uuid.uuid4())
+    """Return a unique name."""
+    return "test-{0}".format(uuid.uuid4())


 @pytest.fixture()
 def temporary_path(request):
-    '''Return temporary path.'''
+    """Return temporary path."""
     path = tempfile.mkdtemp()

     def cleanup():
-        '''Remove created path.'''
+        """Remove created path."""
         try:
             shutil.rmtree(path)
         except OSError:
@@ -163,12 +155,12 @@ def cleanup():

 @pytest.fixture()
 def new_user(request, session, unique_name):
-    '''Return a newly created unique user.'''
-    entity = session.create('User', {'username': unique_name})
+    """Return a newly created unique user."""
entity = session.create("User", {"username": unique_name}) session.commit() def cleanup(): - '''Remove created entity.''' + """Remove created entity.""" session.delete(entity) session.commit() @@ -179,9 +171,9 @@ def cleanup(): @pytest.fixture() def user(session): - '''Return the same user entity for entire session.''' + """Return the same user entity for entire session.""" # Jenkins user - entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') + entity = session.get("User", "d07ae5d0-66e1-11e1-b5e9-f23c91df25eb") assert entity is not None return entity @@ -189,63 +181,69 @@ def user(session): @pytest.fixture() def project_schema(session): - '''Return project schema.''' + """Return project schema.""" # VFX Scheme - entity = session.get( - 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' - ) + entity = session.get("ProjectSchema", "69cb7f92-4dbf-11e1-9902-f23c91df25eb") assert entity is not None return entity @pytest.fixture() def new_project_tree(request, session, user): - '''Return new project with basic tree.''' - project_schema = session.query('ProjectSchema').first() - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) + """Return new project with basic tree.""" + project_schema = session.query("ProjectSchema").first() + default_shot_status = project_schema.get_statuses("Shot")[0] + default_task_type = project_schema.get_types("Task")[0] + default_task_status = project_schema.get_statuses("Task", default_task_type["id"])[ + 0 + ] + + project_name = "python_api_test_{0}".format(uuid.uuid1().hex) + project = session.create( + "Project", + { + "name": project_name, + "full_name": project_name + "_full", + "project_schema": project_schema, + }, + ) for sequence_number in range(1): - sequence = session.create('Sequence', { - 'name': 'sequence_{0:03d}'.format(sequence_number), - 'parent': project - }) + sequence = session.create( + "Sequence", + {"name": "sequence_{0:03d}".format(sequence_number), "parent": project}, + ) for shot_number in range(1): - shot = session.create('Shot', { - 'name': 'shot_{0:03d}'.format(shot_number * 10), - 'parent': sequence, - 'status': default_shot_status - }) + shot = session.create( + "Shot", + { + "name": "shot_{0:03d}".format(shot_number * 10), + "parent": sequence, + "status": default_shot_status, + }, + ) for task_number in range(1): - task = session.create('Task', { - 'name': 'task_{0:03d}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - session.create('Appointment', { - 'type': 'assignment', - 'context': task, - 'resource': user - }) + task = session.create( + "Task", + { + "name": "task_{0:03d}".format(task_number), + "parent": shot, + "status": default_task_status, + "type": default_task_type, + }, + ) + + session.create( + "Appointment", + {"type": "assignment", "context": task, "resource": user}, + ) session.commit() def cleanup(): - '''Remove created entity.''' + """Remove created entity.""" session.delete(project) session.commit() @@ -256,19 +254,22 @@ def cleanup(): @pytest.fixture() def new_project(request, session, user): - '''Return new empty project.''' - project_schema = 
-    project_schema = session.query('ProjectSchema').first()
-    project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex)
-    project = session.create('Project', {
-        'name': project_name,
-        'full_name': project_name + '_full',
-        'project_schema': project_schema
-    })
+    """Return new empty project."""
+    project_schema = session.query("ProjectSchema").first()
+    project_name = "python_api_test_{0}".format(uuid.uuid1().hex)
+    project = session.create(
+        "Project",
+        {
+            "name": project_name,
+            "full_name": project_name + "_full",
+            "project_schema": project_schema,
+        },
+    )
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(project)
         session.commit()
@@ -279,9 +280,9 @@ def cleanup():

 @pytest.fixture()
 def project(session):
-    '''Return same project for entire session.'''
+    """Return same project for entire session."""
     # Test project.
-    entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb')
+    entity = session.get("Project", "5671dcb0-66de-11e1-8e6e-f23c91df25eb")
     assert entity is not None

     return entity
@@ -289,27 +290,30 @@

 @pytest.fixture()
 def new_task(request, session, unique_name):
-    '''Return a new task.'''
+    """Return a new task."""
     project = session.query(
-        'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb'
+        "Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb"
     ).one()
-    project_schema = project['project_schema']
-    default_task_type = project_schema.get_types('Task')[0]
-    default_task_status = project_schema.get_statuses(
-        'Task', default_task_type['id']
-    )[0]
-
-    task = session.create('Task', {
-        'name': unique_name,
-        'parent': project,
-        'status': default_task_status,
-        'type': default_task_type
-    })
+    project_schema = project["project_schema"]
+    default_task_type = project_schema.get_types("Task")[0]
+    default_task_status = project_schema.get_statuses("Task", default_task_type["id"])[
+        0
+    ]
+
+    task = session.create(
+        "Task",
+        {
+            "name": unique_name,
+            "parent": project,
+            "status": default_task_status,
+            "type": default_task_type,
+        },
+    )
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(task)
         session.commit()
@@ -320,9 +324,9 @@ def cleanup():

 @pytest.fixture()
 def task(session):
-    '''Return same task for entire session.'''
+    """Return same task for entire session."""
     # Tests/python_api/tasks/t1
-    entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb')
+    entity = session.get("Task", "adb4ad6c-7679-11e2-8df2-f23c91df25eb")
     assert entity is not None

     return entity
@@ -330,15 +334,13 @@

 @pytest.fixture()
 def new_scope(request, session, unique_name):
-    '''Return a new scope.'''
-    scope = session.create('Scope', {
-        'name': unique_name
-    })
+    """Return a new scope."""
+    scope = session.create("Scope", {"name": unique_name})
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(scope)
         session.commit()
@@ -349,16 +351,13 @@ def cleanup():

 @pytest.fixture()
 def new_job(request, session, unique_name, user):
-    '''Return a new scope.'''
-    job = session.create('Job', {
-        'type': 'api_job',
-        'user': user
-    })
+    """Return a new job."""
+    job = session.create("Job", {"type": "api_job", "user": user})
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(job)
         session.commit()
@@ -369,12 +368,12 @@ def cleanup():

 @pytest.fixture()
 def new_note(request, session, unique_name, new_task, user):
-    '''Return a new note attached to a task.'''
+    """Return a new note attached to a task."""
     note = new_task.create_note(unique_name, user)
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(note)
         session.commit()
@@ -385,33 +384,32 @@ def cleanup():

 @pytest.fixture()
 def new_asset_version_with_component(request, session, new_task, unique_name):
-    '''Return a new asset version with one component attached.'''
-    asset_parent = new_task['parent']
-    asset_type = session.query('AssetType').first()
-
-    asset = session.create('Asset', {
-        'name': unique_name,
-        'type': asset_type,
-        'parent': asset_parent
-    })
-    asset_version = session.create('AssetVersion', {
-        'asset_id': asset['id'],
-        'asset': asset,
-        'task': new_task
-    })
-    component = session.create('Component', {
-        'name': unique_name,
-        'version_id': asset_version['id'],
-    })
+    """Return a new asset version with one component attached."""
+    asset_parent = new_task["parent"]
+    asset_type = session.query("AssetType").first()
+
+    asset = session.create(
+        "Asset", {"name": unique_name, "type": asset_type, "parent": asset_parent}
+    )
+    asset_version = session.create(
+        "AssetVersion", {"asset_id": asset["id"], "asset": asset, "task": new_task}
+    )
+    component = session.create(
+        "Component",
+        {
+            "name": unique_name,
+            "version_id": asset_version["id"],
+        },
+    )
     session.commit()

     def cleanup():
-        '''Remove created entities.'''
+        """Remove created entities."""
         session.delete(component)
         session.delete(asset_version)
         session.delete(asset)
         session.commit()
-    
+
     request.addfinalizer(cleanup)

     return asset_version
@@ -419,28 +417,24 @@ def cleanup():

 @pytest.fixture()
 def new_asset_version(request, session, new_task, unique_name):
-    '''Return a new asset version.'''
-    asset_parent = new_task['parent']
-    asset_type = session.query('AssetType').first()
-
-    asset = session.create('Asset', {
-        'name': unique_name,
-        'type': asset_type,
-        'parent': asset_parent
-    })
-    asset_version = session.create('AssetVersion', {
-        'asset_id': asset['id'],
-        'asset': asset,
-        'task': new_task
-    })
+    """Return a new asset version."""
+    asset_parent = new_task["parent"]
+    asset_type = session.query("AssetType").first()
+
+    asset = session.create(
+        "Asset", {"name": unique_name, "type": asset_type, "parent": asset_parent}
+    )
+    asset_version = session.create(
+        "AssetVersion", {"asset_id": asset["id"], "asset": asset, "task": new_task}
+    )
     session.commit()

     def cleanup():
-        '''Remove created entities.'''
+        """Remove created entities."""
         session.delete(asset_version)
         session.delete(asset)
         session.commit()
-    
+
     request.addfinalizer(cleanup)

     return asset_version
@@ -448,12 +442,12 @@ def cleanup():

 @pytest.fixture()
 def new_component(request, session, temporary_file):
-    '''Return a new component not in any location except origin.'''
+    """Return a new component not in any location except origin."""
     component = session.create_component(temporary_file, location=None)
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(component)
         session.commit()
@@ -464,22 +458,18 @@ def cleanup():

 @pytest.fixture()
 def new_container_component(request, session, temporary_directory):
-    '''Return a new container component not in any location except origin.'''
-    component = session.create('ContainerComponent')
+    """Return a new container component not in any location except origin."""
+    component = session.create("ContainerComponent")

     # Add to special origin location so that it is possible to add to other
     # locations.
-    origin_location = session.get(
-        'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID
-    )
-    origin_location.add_component(
-        component, temporary_directory, recursive=False
-    )
+    origin_location = session.get("Location", ftrack_api.symbol.ORIGIN_LOCATION_ID)
+    origin_location.add_component(component, temporary_directory, recursive=False)

     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(component)
         session.commit()
@@ -490,12 +480,12 @@

 @pytest.fixture()
 def new_sequence_component(request, session, temporary_sequence):
-    '''Return a new sequence component not in any location except origin.'''
+    """Return a new sequence component not in any location except origin."""
     component = session.create_component(temporary_sequence, location=None)
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         session.delete(component)
         session.commit()
@@ -506,88 +496,53 @@ def cleanup():

 @pytest.fixture
 def mocked_schemas():
-    '''Return a list of mocked schemas.'''
-    return [{
-        'id': 'Foo',
-        'type': 'object',
-        'properties': {
-            'id': {
-                'type': 'string'
+    """Return a list of mocked schemas."""
+    return [
+        {
+            "id": "Foo",
+            "type": "object",
+            "properties": {
+                "id": {"type": "string"},
+                "string": {"type": "string"},
+                "integer": {"type": "integer"},
+                "number": {"type": "number"},
+                "boolean": {"type": "boolean"},
+                "bars": {"type": "array", "items": {"ref": "$Bar"}},
+                "date": {"type": "string", "format": "date-time"},
             },
-            'string': {
-                'type': 'string'
-            },
-            'integer': {
-                'type': 'integer'
-            },
-            'number': {
-                'type': 'number'
-            },
-            'boolean': {
-                'type': 'boolean'
-            },
-            'bars': {
-                'type': 'array',
-                'items': {
-                    'ref': '$Bar'
-                }
-            },
-            'date': {
-                'type': 'string',
-                'format': 'date-time'
-            }
+            "immutable": ["id"],
+            "primary_key": ["id"],
+            "required": ["id"],
+            "default_projections": ["id"],
         },
-        'immutable': [
-            'id'
-        ],
-        'primary_key': [
-            'id'
-        ],
-        'required': [
-            'id'
-        ],
-        'default_projections': [
-            'id'
-        ]
-    }, {
-        'id': 'Bar',
-        'type': 'object',
-        'properties': {
-            'id': {
-                'type': 'string'
-            },
-            'name': {
-                'type': 'string'
+        {
+            "id": "Bar",
+            "type": "object",
+            "properties": {
+                "id": {"type": "string"},
+                "name": {"type": "string"},
+                "computed_value": {
+                    "type": "string",
+                },
             },
-            'computed_value': {
-                'type': 'string',
-            }
+            "computed": ["computed_value"],
+            "immutable": ["id"],
+            "primary_key": ["id"],
+            "required": ["id"],
+            "default_projections": ["id"],
         },
-        'computed': [
-            'computed_value'
-        ],
-        'immutable': [
-            'id'
-        ],
-        'primary_key': [
-            'id'
-        ],
-        'required': [
-            'id'
-        ],
-        'default_projections': [
-            'id'
-        ]
-    }]
+    ]


 @pytest.fixture
 def mocked_schema_session(mocker, mocked_schemas):
-    '''Return a session instance with mocked schemas.'''
-    mocker.patch.object(ftrack_api.Session,'_load_schemas',return_value=mocked_schemas)
+    """Return a session instance with mocked schemas."""
+    mocker.patch.object(
+        ftrack_api.Session, "_load_schemas", return_value=mocked_schemas
+    )

     # Mock _configure_locations since it will fail if no location schemas
     # exist.
-    mocker.patch.object(ftrack_api.Session, '_configure_locations')
+    mocker.patch.object(ftrack_api.Session, "_configure_locations")

     patched_session = ftrack_api.Session()
     yield patched_session
@@ -603,8 +558,10 @@ def __init__(self, *args, **kwargs):

     def run(self):
         self.exc = None
         try:
-            if hasattr(self, '_Thread__target'):
-                self.ret = self._Thread__target(*self._Thread__args, **self._Thread__kwargs)
+            if hasattr(self, "_Thread__target"):
+                self.ret = self._Thread__target(
+                    *self._Thread__args, **self._Thread__kwargs
+                )
             else:
                 self.ret = self._target(*self._args, **self._kwargs)
         except BaseException as e:
diff --git a/test/unit/entity/test_asset_version.py b/test/unit/entity/test_asset_version.py
index 6960adef..e5690f06 100644
--- a/test/unit/entity/test_asset_version.py
+++ b/test/unit/entity/test_asset_version.py
@@ -7,12 +7,10 @@


 def test_create_component(new_asset_version, temporary_file):
-    '''Create component on asset version.'''
+    """Create component on asset version."""
     session = new_asset_version.session

-    component = new_asset_version.create_component(
-        temporary_file, location=None
-    )
-    assert component['version'] is new_asset_version
+    component = new_asset_version.create_component(temporary_file, location=None)
+    assert component["version"] is new_asset_version

     # Have to delete component before can delete asset version.
     session.delete(component)
@@ -21,16 +19,17 @@ def test_create_component(new_asset_version, temporary_file):
 def test_create_component_specifying_different_version(
     new_asset_version, temporary_file
 ):
-    '''Create component on asset version ignoring specified version.'''
+    """Create component on asset version ignoring specified version."""
     session = new_asset_version.session

     component = new_asset_version.create_component(
-        temporary_file, location=None,
+        temporary_file,
+        location=None,
         data=dict(
-            version_id='this-value-should-be-ignored',
-            version='this-value-should-be-overridden'
-        )
+            version_id="this-value-should-be-ignored",
+            version="this-value-should-be-overridden",
+        ),
     )
-    assert component['version'] is new_asset_version
+    assert component["version"] is new_asset_version

     # Have to delete component before can delete asset version.
     session.delete(component)
@@ -38,24 +37,24 @@ def test_create_component_specifying_different_version(

 @pytest.mark.xfail(
     raises=ftrack_api.exception.ServerError,
-    reason='Testing environment does not support encoding'
+    reason="Testing environment does not support encoding",
 )
 def test_encode_media(new_asset_version, video_path):
-    '''Encode media based on a file path
+    """Encode media based on a file path.

     Encoded components should be associated with the version.
-    '''
+    """
     session = new_asset_version.session

     job = new_asset_version.encode_media(video_path)
-    assert job.entity_type == 'Job'
+    assert job.entity_type == "Job"

-    job_data = json.loads(job['data'])
-    assert 'output' in job_data
-    assert len(job_data['output'])
-    assert 'component_id' in job_data['output'][0]
+    job_data = json.loads(job["data"])
+    assert "output" in job_data
+    assert len(job_data["output"])
+    assert "component_id" in job_data["output"][0]

-    component_id = job_data['output'][0]['component_id']
-    component = session.get('FileComponent', component_id)
+    component_id = job_data["output"][0]["component_id"]
+    component = session.get("FileComponent", component_id)

     # Component should be associated with the version.
-    assert component['version_id'] == new_asset_version['id']
+    assert component["version_id"] == new_asset_version["id"]
diff --git a/test/unit/entity/test_base.py b/test/unit/entity/test_base.py
index aff456e2..152cccb9 100644
--- a/test/unit/entity/test_base.py
+++ b/test/unit/entity/test_base.py
@@ -5,7 +5,7 @@


 def test_hash(project, task, user):
-    '''Entities can be hashed.'''
+    """Entities can be hashed."""
     test_set = set()
     test_set.add(project)
     test_set.add(task)
diff --git a/test/unit/entity/test_component.py b/test/unit/entity/test_component.py
index acb74ad0..e57d9fbe 100644
--- a/test/unit/entity/test_component.py
+++ b/test/unit/entity/test_component.py
@@ -6,7 +6,7 @@


 def test_get_availability(new_component):
-    '''Retrieve availability in locations.'''
+    """Retrieve availability in locations."""
     session = new_component.session

     availability = new_component.get_availability()
@@ -16,13 +16,9 @@ def test_get_availability(new_component):
     assert set(availability.values()) == set([0.0])

     # Add to a location.
-    source_location = session.query(
-        'Location where name is "ftrack.origin"'
-    ).one()
+    source_location = session.query('Location where name is "ftrack.origin"').one()

-    target_location = session.query(
-        'Location where name is "ftrack.unmanaged"'
-    ).one()
+    target_location = session.query('Location where name is "ftrack.unmanaged"').one()

     target_location.add_component(new_component, source_location)
@@ -30,41 +26,38 @@ def test_get_availability(new_component):

     # Currently have to manually expire the related attribute. This should be
     # solved in future by bi-directional relationship updating.
-    del new_component['component_locations']
+    del new_component["component_locations"]

     availability = new_component.get_availability()
-    target_availability = availability.pop(target_location['id'])
+    target_availability = availability.pop(target_location["id"])
     assert target_availability == 100.0

     # All other locations should still be 0.
     assert set(availability.values()) == set([0.0])

+
 @pytest.fixture()
 def image_path():
-    '''Return a path to an image file.'''
+    """Return a path to an image file."""
     image_path = os.path.abspath(
         os.path.join(
-            os.path.dirname(__file__),
-            '..',
-            '..',
-            'fixture',
-            'media',
-            'image.png'
+            os.path.dirname(__file__), "..", "..", "fixture", "media", "image.png"
         )
     )

     return image_path

+
 def test_create_task_thumbnail(new_task, image_path):
-    '''Successfully create thumbnail component and set as task thumbnail.'''
+    """Successfully create thumbnail component and set as task thumbnail."""
     component = new_task.create_thumbnail(image_path)
     component.session.commit()

-    assert component['id'] == new_task['thumbnail_id']
+    assert component["id"] == new_task["thumbnail_id"]


 def test_create_thumbnail_with_data(task, image_path, unique_name):
-    '''Successfully create thumbnail component with custom data.'''
-    data = {'name': unique_name}
+    """Successfully create thumbnail component with custom data."""
+    data = {"name": unique_name}
     component = task.create_thumbnail(image_path, data=data)
     component.session.commit()

-    assert component['name'] == unique_name
+    assert component["name"] == unique_name
diff --git a/test/unit/entity/test_factory.py b/test/unit/entity/test_factory.py
index 5d5a0baa..7c9ddeef 100644
--- a/test/unit/entity/test_factory.py
+++ b/test/unit/entity/test_factory.py
@@ -5,21 +5,19 @@


 class CustomUser(ftrack_api.entity.base.Entity):
-    '''Represent custom user.'''
+    """Represent custom user."""


 def test_extend_standard_factory_with_bases(session):
-    '''Successfully add extra bases to standard factory.'''
+    """Successfully add extra bases to standard factory."""
     standard_factory = ftrack_api.entity.factory.StandardFactory()

     schemas = session._load_schemas(False)
-    user_schema = [
-        schema for schema in schemas if schema['id'] == 'User'
-    ].pop()
+    user_schema = [schema for schema in schemas if schema["id"] == "User"].pop()

     user_class = standard_factory.create(user_schema, bases=[CustomUser])
     session.types[user_class.entity_type] = user_class

-    user = session.query('User').first()
+    user = session.query("User").first()
     assert CustomUser in type(user).__mro__
diff --git a/test/unit/entity/test_job.py b/test/unit/entity/test_job.py
index 52ddbda0..125e17bc 100644
--- a/test/unit/entity/test_job.py
+++ b/test/unit/entity/test_job.py
@@ -5,38 +5,30 @@


 def test_create_job(session, user):
-    '''Create job.'''
-    job = session.create('Job', {
-        'user': user
-    })
+    """Create job."""
+    job = session.create("Job", {"user": user})
     assert job
     session.commit()

-    assert job['type'] == 'api_job'
+    assert job["type"] == "api_job"

     session.delete(job)
     session.commit()


 def test_create_job_with_valid_type(session, user):
-    '''Create job explicitly specifying valid type.'''
-    job = session.create('Job', {
-        'user': user,
-        'type': 'api_job'
-    })
+    """Create job explicitly specifying valid type."""
+    job = session.create("Job", {"user": user, "type": "api_job"})
     assert job
     session.commit()

-    assert job['type'] == 'api_job'
+    assert job["type"] == "api_job"

     session.delete(job)
     session.commit()


 def test_create_job_using_faulty_type(session, user):
-    '''Fail to create job with faulty type.'''
+    """Fail to create job with faulty type."""
     with pytest.raises(ValueError):
-        session.create('Job', {
-            'user': user,
-            'type': 'not-allowed-type'
-        })
+        session.create("Job", {"user": user, "type": "not-allowed-type"})
diff --git a/test/unit/entity/test_location.py b/test/unit/entity/test_location.py
index a9820a69..fee756be 100644
--- a/test/unit/entity/test_location.py
+++ b/test/unit/entity/test_location.py
@@ -17,51 +17,44 @@
 import ftrack_api.symbol


-class Base64ResourceIdentifierTransformer(
-    _transformer.ResourceIdentifierTransformer
-):
-    '''Resource identifier transformer for test purposes.
+class Base64ResourceIdentifierTransformer(_transformer.ResourceIdentifierTransformer):
+    """Resource identifier transformer for test purposes.

     Store resource identifier as base 64 encoded string.

-    '''
+    """

     def encode(self, resource_identifier, context=None):
-        '''Return encoded *resource_identifier* for storing centrally.
+        """Return encoded *resource_identifier* for storing centrally.

         A mapping of *context* values may be supplied to guide the
         transformation.

-        '''
-        return base64.encodebytes(
-            resource_identifier.encode()
-        ).decode('utf-8')
+        """
+        return base64.encodebytes(resource_identifier.encode()).decode("utf-8")

     def decode(self, resource_identifier, context=None):
-        '''Return decoded *resource_identifier* for use locally.
+        """Return decoded *resource_identifier* for use locally.

         A mapping of *context* values may be supplied to guide the
         transformation.

-        '''
-        return base64.decodebytes(
-            resource_identifier.encode()
-        ).decode('utf-8')
+        """
+        return base64.decodebytes(resource_identifier.encode()).decode("utf-8")


 @pytest.fixture()
 def new_location(request, session, unique_name, temporary_directory):
-    '''Return new managed location.'''
-
-    location = session.create('Location', {
-        'name': 'test-location-{}'.format(unique_name)
-    })
+    """Return new managed location."""
+    location = session.create(
+        "Location", {"name": "test-location-{}".format(unique_name)}
+    )
     session.commit()

     location.accessor = ftrack_api.accessor.disk.DiskAccessor(
-        prefix=os.path.join(temporary_directory, 'location')
+        prefix=os.path.join(temporary_directory, "location")
     )
     location.structure = ftrack_api.structure.id.IdStructure()
     location.priority = 10
@@ -69,12 +62,10 @@ def new_location(request, session, unique_name, temporary_directory):
     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         location_components = session.query(
-            'ComponentLocation where location_id is {0}'.format(
-                location['id']
-            )
+            "ComponentLocation where location_id is {0}".format(location["id"])
         ).all()

         # First auto-remove all components in location.
@@ -95,28 +86,27 @@ def cleanup():

 @pytest.fixture()
 def new_unmanaged_location(request, session, unique_name):
-    '''Return new unmanaged location.'''
-    location = session.create('Location', {
-        'name': 'test-location-{}'.format(unique_name)
-    })
+    """Return new unmanaged location."""
+    location = session.create(
+        "Location", {"name": "test-location-{}".format(unique_name)}
+    )

     # TODO: Change to managed and use a temporary directory cleaned up after.
     ftrack_api.mixin(
-        location, ftrack_api.entity.location.UnmanagedLocationMixin,
-        name='UnmanagedTestLocation'
+        location,
+        ftrack_api.entity.location.UnmanagedLocationMixin,
+        name="UnmanagedTestLocation",
     )
-    location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='')
+    location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix="")
     location.structure = ftrack_api.structure.origin.OriginStructure()
     location.priority = 10

     session.commit()

     def cleanup():
-        '''Remove created entity.'''
+        """Remove created entity."""
         location_components = session.query(
-            'ComponentLocation where location_id is {0}'.format(
-                location['id']
-            )
+            "ComponentLocation where location_id is {0}".format(location["id"])
         ).all()

         # First auto-remove all components in location.
@@ -137,66 +127,55 @@ def cleanup():

 @pytest.fixture()
 def origin_location(session):
-    '''Return origin location.'''
+    """Return origin location."""
     return session.query('Location where name is "ftrack.origin"').one()


 @pytest.fixture()
 def server_location(session):
-    '''Return server location.'''
-    return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID)
+    """Return server location."""
+    return session.get("Location", ftrack_api.symbol.SERVER_LOCATION_ID)


 @pytest.fixture()
 def server_image_component(request, session, server_location):
     image_file = os.path.abspath(
         os.path.join(
-            os.path.dirname(__file__),
-            '..',
-            '..',
-            'fixture',
-            'media',
-            'image.png'
+            os.path.dirname(__file__), "..", "..", "fixture", "media", "image.png"
         )
     )
-    component = session.create_component(
-        image_file, location=server_location
-    )
+    component = session.create_component(image_file, location=server_location)

     def cleanup():
         server_location.remove_component(component)
+
     request.addfinalizer(cleanup)
     return component


-@pytest.mark.parametrize('name', [
-    pytest.param('named', id='named'),
-    pytest.param(None, id='unnamed')
-])
+@pytest.mark.parametrize(
+    "name", [pytest.param("named", id="named"), pytest.param(None, id="unnamed")]
+)
 def test_string_representation(session, name):
-    '''Return string representation.'''
-    location = session.create('Location', {'id': '1'})
+    """Return string representation."""
+    location = session.create("Location", {"id": "1"})

     if name:
-        location['name'] = name
+        location["name"] = name

         assert str(location) == '<Location("named", 1)>'
     else:
-        assert str(location) == '<Location(1)>'
+        assert str(location) == "<Location(1)>"


 def test_add_components(new_location, origin_location, session, temporary_file):
-    '''Add components.'''
-    component_a = session.create_component(
-        temporary_file, location=None
-    )
-    component_b = session.create_component(
-        temporary_file, location=None
-    )
+    """Add components."""
+    component_a = session.create_component(temporary_file, location=None)
+    component_b = session.create_component(temporary_file, location=None)

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [0.0, 0.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        0.0,
+        0.0,
+    ]

     new_location.add_components(
         [component_a, component_b], [origin_location, origin_location]
@@ -206,30 +185,26 @@ def test_add_components(new_location, origin_location, session, temporary_file):

     # Currently have to manually expire the related attribute. This should be
     # solved in future by bi-directional relationship updating.
-    del component_a['component_locations']
-    del component_b['component_locations']
+    del component_a["component_locations"]
+    del component_b["component_locations"]

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [100.0, 100.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        100.0,
+        100.0,
+    ]


 def test_add_components_from_single_location(
     new_location, origin_location, session, temporary_file
 ):
-    '''Add components from single location.'''
-    component_a = session.create_component(
-        temporary_file, location=None
-    )
-    component_b = session.create_component(
-        temporary_file, location=None
-    )
+    """Add components from single location."""
+    component_a = session.create_component(temporary_file, location=None)
+    component_b = session.create_component(temporary_file, location=None)

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [0.0, 0.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        0.0,
+        0.0,
+    ]

     new_location.add_components([component_a, component_b], origin_location)
@@ -237,24 +212,24 @@ def test_add_components_from_single_location(

     # Currently have to manually expire the related attribute. This should be
     # solved in future by bi-directional relationship updating.
-    del component_a['component_locations']
-    del component_b['component_locations']
+    del component_a["component_locations"]
+    del component_b["component_locations"]

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [100.0, 100.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        100.0,
+        100.0,
+    ]


 def test_add_components_with_mismatching_sources(new_location, new_component):
-    '''Fail to add components when sources mismatched.'''
+    """Fail to add components when sources mismatched."""
     with pytest.raises(ValueError):
         new_location.add_components([new_component], [])


 def test_add_components_with_undefined_structure(new_location, mocker):
-    '''Fail to add components when location structure undefined.'''
-    mocker.patch.object(new_location, 'structure', None)
+    """Fail to add components when location structure undefined."""
+    mocker.patch.object(new_location, "structure", None)

     with pytest.raises(ftrack_api.exception.LocationError):
         new_location.add_components([], [])
@@ -263,12 +238,10 @@ def test_add_components_with_undefined_structure(new_location, mocker):
 def test_add_components_already_in_location(
     session, temporary_file, new_location, new_component, origin_location
 ):
-    '''Fail to add components already in location.'''
+    """Fail to add components already in location."""
     new_location.add_component(new_component, origin_location)

-    another_new_component = session.create_component(
-        temporary_file, location=None
-    )
+    another_new_component = session.create_component(temporary_file, location=None)

     with pytest.raises(ftrack_api.exception.ComponentInLocationError):
         new_location.add_components(
@@ -279,14 +252,12 @@ def test_add_components_already_in_location(
 def test_add_component_when_data_already_exists(
     new_location, new_component, origin_location
 ):
-    '''Fail to add component when data already exists.'''
+    """Fail to add component when data already exists."""
     # Inject pre-existing data on disk.
-    resource_identifier = new_location.structure.get_resource_identifier(
-        new_component
-    )
+    resource_identifier = new_location.structure.get_resource_identifier(new_component)
     container = new_location.accessor.get_container(resource_identifier)
     new_location.accessor.make_container(container)
-    data = new_location.accessor.open(resource_identifier, 'w')
+    data = new_location.accessor.open(resource_identifier, "w")
     data.close()

     with pytest.raises(ftrack_api.exception.LocationError):
@@ -296,8 +267,8 @@ def test_add_component_when_data_already_exists(
 def test_add_component_missing_source_accessor(
     new_location, new_component, origin_location, mocker
 ):
-    '''Fail to add component when source is missing accessor.'''
-    mocker.patch.object(origin_location, 'accessor', None)
+    """Fail to add component when source is missing accessor."""
+    mocker.patch.object(origin_location, "accessor", None)

     with pytest.raises(ftrack_api.exception.LocationError):
         new_location.add_component(new_component, origin_location)
@@ -306,8 +277,8 @@ def test_add_component_missing_source_accessor(
 def test_add_component_missing_target_accessor(
     new_location, new_component, origin_location, mocker
 ):
-    '''Fail to add component when target is missing accessor.'''
-    mocker.patch.object(new_location, 'accessor', None)
+    """Fail to add component when target is missing accessor."""
+    mocker.patch.object(new_location, "accessor", None)

     with pytest.raises(ftrack_api.exception.LocationError):
         new_location.add_component(new_component, origin_location)
@@ -316,119 +287,84 @@ def test_add_component_missing_target_accessor(
 def test_add_container_component(
     new_container_component, new_location, origin_location
 ):
-    '''Add container component.'''
+    """Add container component."""
     new_location.add_component(new_container_component, origin_location)

-    assert (
-        new_location.get_component_availability(new_container_component)
-        == 100.0
-    )
+    assert new_location.get_component_availability(new_container_component) == 100.0


 def test_add_sequence_component_recursively(
     new_sequence_component, new_location, origin_location
 ):
-    '''Add sequence component recursively.'''
-    new_location.add_component(
-        new_sequence_component, origin_location, recursive=True
-    )
+    """Add sequence component recursively."""
+    new_location.add_component(new_sequence_component, origin_location, recursive=True)

-    assert (
-        new_location.get_component_availability(new_sequence_component)
-        == 100.0
-    )
+    assert new_location.get_component_availability(new_sequence_component) == 100.0


 def test_add_sequence_component_non_recursively(
     new_sequence_component, new_location, origin_location
 ):
-    '''Add sequence component non recursively.'''
-    new_location.add_component(
-        new_sequence_component, origin_location, recursive=False
-    )
+    """Add sequence component non recursively."""
+    new_location.add_component(new_sequence_component, origin_location, recursive=False)

-    assert (
-        new_location.get_component_availability(new_sequence_component)
-        == 0.0
-    )
+    assert new_location.get_component_availability(new_sequence_component) == 0.0


-def test_remove_components(
-    session, new_location, origin_location, temporary_file
-):
-    '''Remove components.'''
-    component_a = session.create_component(
-        temporary_file, location=None
-    )
-    component_b = session.create_component(
-        temporary_file, location=None
-    )
+def test_remove_components(session, new_location, origin_location, temporary_file):
+    """Remove components."""
+    component_a = session.create_component(temporary_file, location=None)
+    component_b = session.create_component(temporary_file, location=None)

     new_location.add_components([component_a, component_b], origin_location)

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [100.0, 100.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        100.0,
+        100.0,
+    ]

-    new_location.remove_components([
-        component_a, component_b
-    ])
+    new_location.remove_components([component_a, component_b])

     # Recalculate availability.

     # Currently have to manually expire the related attribute. This should be
     # solved in future by bi-directional relationship updating.
-    del component_a['component_locations']
-    del component_b['component_locations']
+    del component_a["component_locations"]
+    del component_b["component_locations"]

-    assert (
-        new_location.get_component_availabilities([component_a, component_b])
-        == [0.0, 0.0]
-    )
+    assert new_location.get_component_availabilities([component_a, component_b]) == [
+        0.0,
+        0.0,
+    ]


 def test_remove_sequence_component_recursively(
     new_sequence_component, new_location, origin_location
 ):
-    '''Remove sequence component recursively.'''
-    new_location.add_component(
-        new_sequence_component, origin_location, recursive=True
-    )
+    """Remove sequence component recursively."""
+    new_location.add_component(new_sequence_component, origin_location, recursive=True)

-    new_location.remove_component(
-        new_sequence_component, recursive=True
-    )
+    new_location.remove_component(new_sequence_component, recursive=True)

-    assert (
-        new_location.get_component_availability(new_sequence_component)
-        == 0.0
-    )
+    assert new_location.get_component_availability(new_sequence_component) == 0.0


 def test_remove_sequence_component_non_recursively(
     new_sequence_component, new_location, origin_location
 ):
-    '''Remove sequence component non recursively.'''
-    new_location.add_component(
-        new_sequence_component, origin_location, recursive=False
-    )
+    """Remove sequence component non recursively."""
+    new_location.add_component(new_sequence_component, origin_location, recursive=False)

-    new_location.remove_component(
-        new_sequence_component, recursive=False
-    )
+    new_location.remove_component(new_sequence_component, recursive=False)

-    assert (
-        new_location.get_component_availability(new_sequence_component)
-        == 0.0
-    )
+    assert new_location.get_component_availability(new_sequence_component) == 0.0


 def test_remove_component_missing_accessor(
     new_location, new_component, origin_location, mocker
 ):
-    '''Fail to remove component when location is missing accessor.'''
+    """Fail to remove component when location is missing accessor."""
     new_location.add_component(new_component, origin_location)

-    mocker.patch.object(new_location, 'accessor', None)
+    mocker.patch.object(new_location, "accessor", None)

     with pytest.raises(ftrack_api.exception.LocationError):
         new_location.remove_component(new_component)
@@ -437,12 +373,12 @@ def test_remove_component_missing_accessor(
 def test_resource_identifier_transformer(
     new_component, new_unmanaged_location, origin_location, mocker
 ):
-    '''Transform resource identifier.'''
+    """Transform resource identifier."""
     session = new_unmanaged_location.session
     transformer = Base64ResourceIdentifierTransformer(session)

     mocker.patch.object(
-        new_unmanaged_location, 'resource_identifier_transformer', transformer
+        new_unmanaged_location, "resource_identifier_transformer", transformer
     )

     new_unmanaged_location.add_component(new_component, origin_location)
@@ -450,10 +386,9 @@ def test_resource_identifier_transformer(
     original_resource_identifier = origin_location.get_resource_identifier(
         new_component
     )
-    assert (
-        new_component['component_locations'][0]['resource_identifier']
-        == base64.encodebytes(original_resource_identifier.encode()).decode('utf-8')
-    )
+    assert new_component["component_locations"][0][
+        "resource_identifier"
+    ] == base64.encodebytes(original_resource_identifier.encode()).decode("utf-8")

     assert (
         new_unmanaged_location.get_resource_identifier(new_component)
@@ -462,11 +397,9 @@ def test_resource_identifier_transformer(


 def test_get_filesystem_path(new_component, new_location, origin_location):
-    '''Retrieve filesystem path.'''
+    """Retrieve filesystem path."""
     new_location.add_component(new_component, origin_location)
-    resource_identifier = new_location.structure.get_resource_identifier(
-        new_component
-    )
+    resource_identifier = new_location.structure.get_resource_identifier(new_component)
     expected = os.path.normpath(
         os.path.join(new_location.accessor.prefix, resource_identifier)
     )
@@ -474,48 +407,39 @@ def test_get_filesystem_path(new_component, new_location, origin_location):


 def test_get_context(new_component, new_location, origin_location):
-    '''Retrieve context for component.'''
-    resource_identifier = origin_location.get_resource_identifier(
-        new_component
-    )
+    """Retrieve context for component."""
+    resource_identifier = origin_location.get_resource_identifier(new_component)
     context = new_location._get_context(new_component, origin_location)
-    assert context == {
-        'source_resource_identifier': resource_identifier
-    }
+    assert context == {"source_resource_identifier": resource_identifier}


 def test_get_context_for_component_not_in_source(new_component, new_location):
-    '''Retrieve context for component not in source location.'''
+    """Retrieve context for component not in source location."""
     context = new_location._get_context(new_component, new_location)
     assert context == {}


 def test_data_transfer(session, new_location, origin_location):
-    '''Transfer a real file and make sure it is identical.'''
+    """Transfer a real file and make sure it is identical."""
     video_file = os.path.abspath(
         os.path.join(
             os.path.dirname(__file__),
-            '..',
-            '..',
-            'fixture',
-            'media',
-            'colour_wheel.mov'
+            "..",
+            "..",
+            "fixture",
+            "media",
+            "colour_wheel.mov",
         )
    )
-    component = session.create_component(
-        video_file, location=new_location
-    )
+    component = session.create_component(video_file, location=new_location)

     new_video_file = new_location.get_filesystem_path(component)

     assert filecmp.cmp(video_file, new_video_file)


 def test_get_thumbnail_url(server_location, server_image_component):
-    '''Test download a thumbnail image from server location'''
-    thumbnail_url = server_location.get_thumbnail_url(
-        server_image_component,
-        size=10
-    )
+    """Test downloading a thumbnail image from the server location."""
+    thumbnail_url = server_location.get_thumbnail_url(server_image_component, size=10)
     assert thumbnail_url

     response = requests.get(thumbnail_url)
@@ -524,25 +448,24 @@ def test_get_thumbnail_url(server_location, server_image_component):
     image_file = os.path.abspath(
         os.path.join(
             os.path.dirname(__file__),
-            '..',
-            '..',
-            'fixture',
-            'media',
-            'image-resized-10.png'
+            "..",
+            "..",
+            "fixture",
+            "media",
+            "image-resized-10.png",
         )
     )
-    expected_image_contents = open(
-        image_file, 'rb'
-    ).read()
+    expected_image_contents = open(image_file, "rb").read()
     assert response.content == expected_image_contents


 # meta fixture to parametrise location fixtures
 # https://github.com/pytest-dev/pytest/issues/349
-@pytest.fixture(params=[
-    'new_location',
-    'new_unmanaged_location',
+@pytest.fixture(
+    params=[
+        "new_location",
+        "new_unmanaged_location",
     ]
 )
 def multi_location(request):
@@ -552,11 +475,7 @@
 def test_transfer_component_from_server(
     server_location, server_image_component, multi_location
 ):
-    '''Test add component to new location from server location'''
+    """Test adding a component to a new location from the server location."""
     multi_location.add_component(server_image_component, server_location)

-    assert (
-        multi_location.get_component_availability(server_image_component)
-        == 100.0
-    )
-
+    assert multi_location.get_component_availability(server_image_component) == 100.0
diff --git a/test/unit/entity/test_metadata.py b/test/unit/entity/test_metadata.py
index 3a81fdbe..9ab86ad9 100644
--- a/test/unit/entity/test_metadata.py
+++ b/test/unit/entity/test_metadata.py
@@ -7,129 +7,114 @@


 def test_query_metadata(new_project):
-    '''Query metadata.'''
+    """Query metadata."""
     session = new_project.session

     metadata_key = uuid.uuid1().hex
     metadata_value = uuid.uuid1().hex

-    new_project['metadata'][metadata_key] = metadata_value
+    new_project["metadata"][metadata_key] = metadata_value
     session.commit()

-    results = session.query(
-        'Project where metadata.key is {0}'.format(metadata_key)
-    )
+    results = session.query("Project where metadata.key is {0}".format(metadata_key))
     assert len(results) == 1
-    assert new_project['id'] == results[0]['id']
+    assert new_project["id"] == results[0]["id"]

     results = session.query(
-        'Project where metadata.value is {0}'.format(metadata_value)
+        "Project where metadata.value is {0}".format(metadata_value)
     )
     assert len(results) == 1
-    assert new_project['id'] == results[0]['id']
+    assert new_project["id"] == results[0]["id"]

     results = session.query(
-        'Project where metadata.key is {0} and '
-        'metadata.value is {1}'.format(metadata_key, metadata_value)
+        "Project where metadata.key is {0} and "
+        "metadata.value is {1}".format(metadata_key, metadata_value)
     )
     assert len(results) == 1
-    assert new_project['id'] == results[0]['id']
+    assert new_project["id"] == results[0]["id"]


 def test_set_get_metadata_from_different_sessions(new_project):
-    '''Get and set metadata using different sessions.'''
+    """Get and set metadata using different sessions."""
     session = new_project.session

     metadata_key = uuid.uuid1().hex
     metadata_value = uuid.uuid1().hex

-    new_project['metadata'][metadata_key] = metadata_value
+    new_project["metadata"][metadata_key] = metadata_value
     session.commit()

     new_session = ftrack_api.Session()
-    project = new_session.query(
-        'Project where id is {0}'.format(new_project['id'])
-    )[0]
+    project = new_session.query("Project where id is {0}".format(new_project["id"]))[0]

-    assert project['metadata'][metadata_key] == metadata_value
+    assert project["metadata"][metadata_key] == metadata_value

-    project['metadata'][metadata_key] = uuid.uuid1().hex
+    project["metadata"][metadata_key] = uuid.uuid1().hex

     new_session.commit()

     new_session = ftrack_api.Session()
-    project = new_session.query(
-        'Project where id is {0}'.format(project['id'])
-    )[0]
+    project = new_session.query("Project where id is {0}".format(project["id"]))[0]

-    assert project['metadata'][metadata_key] != metadata_value
+    assert project["metadata"][metadata_key] != metadata_value


 def test_get_set_multiple_metadata(new_project):
-    '''Get and set multiple metadata.'''
+    """Get and set multiple metadata."""
     session = new_project.session

-    new_project['metadata'] = {
-        'key1': 'value1',
-        'key2': 'value2'
-    }
+    new_project["metadata"] = {"key1": "value1", "key2": "value2"}
     session.commit()

-    assert set(new_project['metadata'].keys()) == set(['key1', 'key2'])
+    assert set(new_project["metadata"].keys()) == set(["key1", "key2"])

     new_session = ftrack_api.Session()
-    retrieved = new_session.query(
-        'Project where id is {0}'.format(new_project['id'])
-    )[0]
+    retrieved = new_session.query("Project where id is {0}".format(new_project["id"]))[
+        0
+    ]

-    assert set(retrieved['metadata'].keys()) == set(['key1', 'key2'])
+    assert set(retrieved["metadata"].keys()) == set(["key1", "key2"])


 def test_metadata_parent_type_remains_in_schema_id_format(session, new_project):
-    '''Metadata parent_type remains in schema id format post commit.'''
-    entity = session.create('Metadata', {
-        'key': 'key', 'value': 'value',
-        'parent_type': new_project.entity_type,
-        'parent_id': new_project['id']
-    })
+    """Metadata parent_type remains in schema id format post commit."""
+    entity = session.create(
+        "Metadata",
+        {
+            "key": "key",
+            "value": "value",
+            "parent_type": new_project.entity_type,
+            "parent_id": new_project["id"],
+        },
+    )

     session.commit()

-    assert entity['parent_type'] == new_project.entity_type
+    assert entity["parent_type"] == new_project.entity_type


 def test_set_metadata_twice(new_project):
-    '''Set metadata twice in a row.'''
+    """Set metadata twice in a row."""
     session = new_project.session

-    new_project['metadata'] = {
-        'key1': 'value1',
-        'key2': 'value2'
-    }
+    new_project["metadata"] = {"key1": "value1", "key2": "value2"}
     session.commit()

-    assert set(new_project['metadata'].keys()) == set(['key1', 'key2'])
+    assert set(new_project["metadata"].keys()) == set(["key1", "key2"])

-    new_project['metadata'] = {
-        'key3': 'value3',
-        'key4': 'value4'
-    }
+    new_project["metadata"] = {"key3": "value3", "key4": "value4"}
     session.commit()


 def test_set_same_metadata_on_retrieved_entity(new_project):
-    '''Set same metadata on retrieved entity.'''
+    """Set same metadata on retrieved entity."""
     session = new_project.session

-    new_project['metadata'] = {
-        'key1': 'value1'
-    }
+    new_project["metadata"] = {"key1": "value1"}
     session.commit()

-    project = session.get('Project', new_project['id'])
+    project = session.get("Project", new_project["id"])

-    project['metadata'] = {
-        'key1': 'value1'
-    }
+    project["metadata"] = {"key1": "value1"}
     session.commit()
diff --git a/test/unit/entity/test_note.py b/test/unit/entity/test_note.py
index 43971f72..cb6d2af8 100644
--- a/test/unit/entity/test_note.py
+++ b/test/unit/entity/test_note.py
@@ -9,35 +9,34 @@

 @flaky(max_runs=2, min_passes=1)
 def test_create_reply(session, new_note, user, unique_name):
-    '''Create reply to a note.'''
-    reply_text = 'My reply on note'
+    """Create reply to a note."""
+    reply_text = "My reply on note"

     new_note.create_reply(reply_text, user)

     session.commit()

-    assert len(new_note['replies']) == 1
+    assert len(new_note["replies"]) == 1

-    assert reply_text == new_note['replies'][0]['content']
+    assert reply_text == new_note["replies"][0]["content"]


 def test_create_note_on_entity(session, new_task, user, unique_name):
-    '''Create note attached to an entity.'''
+    """Create note attached to an entity."""
     note = new_task.create_note(unique_name, user)
     session.commit()

     session.reset()
     retrieved_task = session.get(*ftrack_api.inspection.identity(new_task))
-    assert len(retrieved_task['notes']) == 1
-    assert (
-        ftrack_api.inspection.identity(retrieved_task['notes'][0])
-        == ftrack_api.inspection.identity(note)
-    )
+    assert len(retrieved_task["notes"]) == 1
+    assert ftrack_api.inspection.identity(
+        retrieved_task["notes"][0]
+    ) == ftrack_api.inspection.identity(note)


 def test_create_note_on_entity_specifying_recipients(
     session, new_task, user, unique_name, new_user
 ):
-    '''Create note with specified recipients attached to an entity.'''
+    """Create note with specified recipients attached to an entity."""
     recipient = new_user
     note = new_task.create_note(unique_name, user, recipients=[recipient])
     session.commit()
@@ -47,10 +46,10 @@ def test_create_note_on_entity_specifying_recipients(

     # Note: The calling user is automatically added server side so there will be
     # 2 recipients.
-    assert len(retrieved_note['recipients']) == 2
+    assert len(retrieved_note["recipients"]) == 2

     specified_recipient_present = False
-    for entry in retrieved_note['recipients']:
-        if entry['resource_id'] == recipient['id']:
+    for entry in retrieved_note["recipients"]:
+        if entry["resource_id"] == recipient["id"]:
             specified_recipient_present = True
             break
@@ -60,11 +59,11 @@ def test_create_note_on_entity_specifying_recipients(
 def test_create_note_on_entity_specifying_category(
     session, new_task, user, unique_name
 ):
-    '''Create note with specified category attached to an entity.'''
-    category = session.query('NoteCategory').first()
+    """Create note with specified category attached to an entity."""
+    category = session.query("NoteCategory").first()

     note = new_task.create_note(unique_name, user, category=category)
     session.commit()

     session.reset()
     retrieved_note = session.get(*ftrack_api.inspection.identity(note))
-    assert retrieved_note['category']['id'] == category['id']
+    assert retrieved_note["category"]["id"] == category["id"]
diff --git a/test/unit/entity/test_project_schema.py b/test/unit/entity/test_project_schema.py
index 2f46b500..7653e840 100644
--- a/test/unit/entity/test_project_schema.py
+++ b/test/unit/entity/test_project_schema.py
@@ -6,52 +6,70 @@
 import pytest


-@pytest.mark.parametrize('schema, expected', [
-    pytest.param('Task', [
-        'Not started', 'In progress', 'Awaiting approval', 'Approved'
-    ], id='task'),
-    pytest.param('Shot', [
-        'Normal', 'Omitted', 'On Hold'
-    ], id='shot'),
-    pytest.param('AssetVersion', [
-        'Approved', 'Pending'
-    ], id='asset version'),
-    pytest.param('AssetBuild', [
-        'Normal', 'Omitted', 'On Hold'
-    ], id='asset_build'),
-    pytest.param(
-        'Invalid', ValueError,id='invalid')
-])
+@pytest.mark.parametrize(
+    "schema, expected",
+    [
+        pytest.param(
+            "Task",
+            ["Not started", "In progress", "Awaiting approval", "Approved"],
+            id="task",
+        ),
+        pytest.param("Shot", ["Normal", "Omitted", "On Hold"], id="shot"),
+        pytest.param("AssetVersion", ["Approved", "Pending"], id="asset version"),
+        pytest.param("AssetBuild", ["Normal", "Omitted", "On Hold"], id="asset_build"),
+        pytest.param("Invalid", ValueError, id="invalid"),
+    ],
+)
 def test_get_statuses(project_schema, schema, expected):
-    '''Retrieve statuses for schema and optional type.'''
+    """Retrieve statuses for schema and optional type."""
     if inspect.isclass(expected) and issubclass(expected, Exception):
         with pytest.raises(expected):
             project_schema.get_statuses(schema)

     else:
         statuses = project_schema.get_statuses(schema)
-        status_names = [status['name'] for status in statuses]
+        status_names = [status["name"] for status in statuses]
         assert sorted(status_names) == sorted(expected)


-@pytest.mark.parametrize('schema, expected', [
pytest.param('Task', [ - 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', - 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', - 'test 1', 'test type 2' - ], id='task'), - pytest.param('AssetBuild', [ - 'Character', 'Prop', 'Environment', 'Matte Painting' - ], id='asset build'), - pytest.param('Invalid', ValueError, id='invalid') -]) +@pytest.mark.parametrize( + "schema, expected", + [ + pytest.param( + "Task", + [ + "Generic", + "Animation", + "Modeling", + "Previz", + "Lookdev", + "Hair", + "Cloth", + "FX", + "Lighting", + "Compositing", + "Tracking", + "Rigging", + "test 1", + "test type 2", + ], + id="task", + ), + pytest.param( + "AssetBuild", + ["Character", "Prop", "Environment", "Matte Painting"], + id="asset build", + ), + pytest.param("Invalid", ValueError, id="invalid"), + ], +) def test_get_types(project_schema, schema, expected): - '''Retrieve types for schema.''' + """Retrieve types for schema.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): project_schema.get_types(schema) else: types = project_schema.get_types(schema) - type_names = [type_['name'] for type_ in types] + type_names = [type_["name"] for type_ in types] assert sorted(type_names) == sorted(expected) diff --git a/test/unit/entity/test_scopes.py b/test/unit/entity/test_scopes.py index 1a5afe70..6144abe7 100644 --- a/test/unit/entity/test_scopes.py +++ b/test/unit/entity/test_scopes.py @@ -3,20 +3,20 @@ def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): - '''Add, remove and query scopes for task.''' - query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) + """Add, remove and query scopes for task.""" + query_string = "Task where scopes.name is {0}".format(new_scope["name"]) tasks = session.query(query_string) assert len(tasks) == 0 - new_task['scopes'].append(new_scope) + new_task["scopes"].append(new_scope) session.commit() tasks = session.query(query_string) assert len(tasks) == 1 and tasks[0] == new_task - new_task['scopes'].remove(new_scope) + new_task["scopes"].remove(new_scope) session.commit() tasks = session.query(query_string) diff --git a/test/unit/entity/test_user.py b/test/unit/entity/test_user.py index c854fe90..9a3ab0c3 100644 --- a/test/unit/entity/test_user.py +++ b/test/unit/entity/test_user.py @@ -3,43 +3,40 @@ from past.builtins import long + def test_force_start_timer(new_user, task): - '''Successfully force starting a timer when another timer is running.''' + """Successfully force starting a timer when another timer is running.""" first_timer = new_user.start_timer(context=task) second_timer = new_user.start_timer(context=task, force=True) - assert first_timer['id'] - assert second_timer['id'] - assert first_timer['id'] != second_timer['id'] + assert first_timer["id"] + assert second_timer["id"] + assert first_timer["id"] != second_timer["id"] def test_timer_creates_timelog(new_user, task, unique_name): - '''Successfully create time log when stopping timer. + """Successfully create time log when stopping timer. A timer which was immediately stopped should have a duration less than a minute. 
- ''' - comment = 'comment' + unique_name - timer = new_user.start_timer( - context=task, - name=unique_name, - comment=comment - ) - timer_start = timer['start'] + """ + comment = "comment" + unique_name + timer = new_user.start_timer(context=task, name=unique_name, comment=comment) + timer_start = timer["start"] timelog = new_user.stop_timer() - assert timelog['user_id'] == new_user['id'] - assert timelog['context_id']== task['id'] - assert timelog['name'] == unique_name - assert timelog['comment'] == comment - assert timelog['start'] == timer_start - assert isinstance(timelog['duration'], (int, long, float)) - assert timelog['duration'] < 60 + assert timelog["user_id"] == new_user["id"] + assert timelog["context_id"] == task["id"] + assert timelog["name"] == unique_name + assert timelog["comment"] == comment + assert timelog["start"] == timer_start + assert isinstance(timelog["duration"], (int, long, float)) + assert timelog["duration"] < 60 def test_reset_user_api_key(new_user): - '''Test resetting of api keys.''' + """Test resetting of api keys.""" api_keys = list() for i in range(0, 10): @@ -47,4 +44,3 @@ def test_reset_user_api_key(new_user): # make sure all api keys are unique assert len(set(api_keys)) == 10 - diff --git a/test/unit/event/event_hub_server_heartbeat.py b/test/unit/event/event_hub_server_heartbeat.py index 446b6ed0..865c3bd6 100644 --- a/test/unit/event/event_hub_server_heartbeat.py +++ b/test/unit/event/event_hub_server_heartbeat.py @@ -14,17 +14,17 @@ def callback(event): - '''Track received messages.''' - counter = event['data']['counter'] + """Track received messages.""" + counter = event["data"]["counter"] RECEIVED.append(counter) - print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) + print("Received message {0} ({1} in total)".format(counter, len(RECEIVED))) def main(arguments=None): - '''Publish and receive heartbeat test.''' + """Publish and receive heartbeat test.""" parser = argparse.ArgumentParser() - parser.add_argument('mode', choices=['publish', 'subscribe']) - parser.add_argument('topic') + parser.add_argument("mode", choices=["publish", "subscribe"]) + parser.add_argument("topic") namespace = parser.parse_args(arguments) logging.basicConfig(level=logging.INFO) @@ -34,47 +34,44 @@ def main(arguments=None): message_count = 100 sleep_time_per_message = 1 - if namespace.mode == 'publish': + if namespace.mode == "publish": max_atempts = 100 retry_interval = 0.1 atempt = 0 while not session.event_hub.connected: - print ( - 'Session is not yet connected to event hub, sleeping for 0.1s' - ) + print("Session is not yet connected to event hub, sleeping for 0.1s") time.sleep(retry_interval) atempt = atempt + 1 if atempt > max_atempts: raise Exception( - 'Unable to connect to server within {0} seconds'.format( + "Unable to connect to server within {0} seconds".format( max_atempts * retry_interval ) ) - print('Sending {0} messages...'.format(message_count)) + print("Sending {0} messages...".format(message_count)) for counter in range(1, message_count + 1): session.event_hub.publish( Event(topic=namespace.topic, data=dict(counter=counter)) ) - print('Sent message {0}'.format(counter)) + print("Sent message {0}".format(counter)) if counter < message_count: time.sleep(sleep_time_per_message) - elif namespace.mode == 'subscribe': - session.event_hub.subscribe('topic={0}'.format(namespace.topic), callback) + elif namespace.mode == "subscribe": + session.event_hub.subscribe("topic={0}".format(namespace.topic), callback) session.event_hub.wait( - 
duration=( - ((message_count - 1) * sleep_time_per_message) + 15 - ) + duration=(((message_count - 1) * sleep_time_per_message) + 15) ) if len(RECEIVED) != message_count: print( - '>> Failed to receive all messages. Dropped {0} <<' - .format(message_count - len(RECEIVED)) + ">> Failed to receive all messages. Dropped {0} <<".format( + message_count - len(RECEIVED) + ) ) return False @@ -84,7 +81,7 @@ def main(arguments=None): return True -if __name__ == '__main__': +if __name__ == "__main__": result = main(sys.argv[1:]) if not result: raise SystemExit(1) diff --git a/test/unit/event/test_base.py b/test/unit/event/test_base.py index d343e238..d8319731 100644 --- a/test/unit/event/test_base.py +++ b/test/unit/event/test_base.py @@ -5,16 +5,14 @@ def test_string_representation(): - '''String representation.''' - event = ftrack_api.event.base.Event('test', id='some-id') - assert ( - str(event._data) in str(event) - ) + """String representation.""" + event = ftrack_api.event.base.Event("test", id="some-id") + assert str(event._data) in str(event) def test_stop(): - '''Set stopped flag on event.''' - event = ftrack_api.event.base.Event('test', id='some-id') + """Set stopped flag on event.""" + event = ftrack_api.event.base.Event("test", id="some-id") assert event.is_stopped() is False @@ -23,8 +21,8 @@ def test_stop(): def test_is_stopped(): - '''Report stopped status of event.''' - event = ftrack_api.event.base.Event('test', id='some-id') + """Report stopped status of event.""" + event = ftrack_api.event.base.Event("test", id="some-id") assert event.is_stopped() is False diff --git a/test/unit/event/test_expression.py b/test/unit/event/test_expression.py index 0b711ed1..9b9bbea5 100644 --- a/test/unit/event/test_expression.py +++ b/test/unit/event/test_expression.py @@ -6,50 +6,54 @@ import pytest -from ftrack_api.event.expression import ( - Expression, All, Any, Not, Condition, Parser -) +from ftrack_api.event.expression import Expression, All, Any, Not, Condition, Parser from ftrack_api.exception import ParseError @pytest.fixture() def candidate(): - '''Return common candidate to test expressions against.''' - return { - 'id': 10, - 'name': 'value', - 'change': { - 'name': 'value', - 'new_value': 10 - } - } - - -@pytest.mark.parametrize('expression, expected', [ - pytest.param('', Expression(), marks=pytest.mark.xfail, id='Empty Expression'), - pytest.param('invalid', ParseError, id='Invalid Expression'), - pytest.param('key=value nor other=value', ParseError, id='Invalid Conjunction'), - pytest.param('key=value', Condition('key', operator.eq, 'value'), id='Basic Condition'), - pytest.param('key="value"', Condition('key', operator.eq, 'value'), id='Basic Quoted Condition'), - pytest.param( - 'a=b and ((c=d or e!=f) and not g.h > 10)', - All([ - Condition('a', operator.eq, 'b'), - All([ - Any([ - Condition('c', operator.eq, 'd'), - Condition('e', operator.ne, 'f') - ]), - Not( - Condition('g.h', operator.gt, 10) - ) - ]) - ]), - id='Complex Condition' - ) -]) + """Return common candidate to test expressions against.""" + return {"id": 10, "name": "value", "change": {"name": "value", "new_value": 10}} + + +@pytest.mark.parametrize( + "expression, expected", + [ + pytest.param("", Expression(), marks=pytest.mark.xfail, id="Empty Expression"), + pytest.param("invalid", ParseError, id="Invalid Expression"), + pytest.param("key=value nor other=value", ParseError, id="Invalid Conjunction"), + pytest.param( + "key=value", Condition("key", operator.eq, "value"), id="Basic Condition" + ), + 
pytest.param(
+            'key="value"',
+            Condition("key", operator.eq, "value"),
+            id="Basic Quoted Condition",
+        ),
+        pytest.param(
+            "a=b and ((c=d or e!=f) and not g.h > 10)",
+            All(
+                [
+                    Condition("a", operator.eq, "b"),
+                    All(
+                        [
+                            Any(
+                                [
+                                    Condition("c", operator.eq, "d"),
+                                    Condition("e", operator.ne, "f"),
+                                ]
+                            ),
+                            Not(Condition("g.h", operator.gt, 10)),
+                        ]
+                    ),
+                ]
+            ),
+            id="Complex Condition",
+        ),
+    ],
+)
 def test_parser_parse(expression, expected):
-    '''Parse expression into Expression instances.'''
+    """Parse expression into Expression instances."""
     parser = Parser()
 
     if inspect.isclass(expected) and issubclass(expected, Exception):
@@ -59,91 +63,119 @@
         assert str(parser.parse(expression)) == str(expected)
 
 
-@pytest.mark.parametrize('expression, expected', [
-    pytest.param(Expression(), '<Expression>', id='Expressions'),
-    pytest.param(All([Expression(), Expression()]), '<All [<Expression> <Expression>]>', id='All'),
-    pytest.param(Any([Expression(), Expression()]), '<Any [<Expression> <Expression>]>', id='Any'),
-    pytest.param(Not(Expression()), '<Not <Expression>>', id='Not'),
-    pytest.param(Condition('key', '=', 'value'), '<Condition key=value>', id='Condition')
-])
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        pytest.param(Expression(), "<Expression>", id="Expressions"),
+        pytest.param(
+            All([Expression(), Expression()]),
+            "<All [<Expression> <Expression>]>",
+            id="All",
+        ),
+        pytest.param(
+            Any([Expression(), Expression()]),
+            "<Any [<Expression> <Expression>]>",
+            id="Any",
+        ),
+        pytest.param(Not(Expression()), "<Not <Expression>>", id="Not"),
+        pytest.param(
+            Condition("key", "=", "value"), "<Condition key=value>", id="Condition"
+        ),
+    ],
+)
 def test_string_representation(expression, expected):
-    '''String representation of expression.'''
+    """String representation of expression."""
     assert str(expression) == expected
 
 
-@pytest.mark.parametrize('expression, expected', [
-    # Expression
-    pytest.param(Expression(), True, id='Expression-always matches'),
-
-    # All
-    pytest.param(All(), True, id='All-no expressions always matches'),
-    pytest.param(All([Expression(), Expression()]), True, id='All-all match'),
-    pytest.param(All([Expression(), Condition('test', operator.eq, 'value')]), False, id='All-not all match'),
-
-    # Any
-    pytest.param(Any(), False, id='Any-no expressions never matches'),
-    pytest.param(Any([Expression(), Condition('test', operator.eq, 'value')]), True, id='Any-some match'),
-    pytest.param(Any([
-        Condition('test', operator.eq, 'value'),
-        Condition('other', operator.eq, 'value')
-    ]), False, id='Any-none match'),
-
-    # Not
-    pytest.param(Not(Expression()), False, id='Not-invert positive match'),
-    pytest.param(Not(Not(Expression())), True, id='Not-double negative is positive match')
-])
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        # Expression
+        pytest.param(Expression(), True, id="Expression-always matches"),
+        # All
+        pytest.param(All(), True, id="All-no expressions always matches"),
+        pytest.param(All([Expression(), Expression()]), True, id="All-all match"),
+        pytest.param(
+            All([Expression(), Condition("test", operator.eq, "value")]),
+            False,
+            id="All-not all match",
+        ),
+        # Any
+        pytest.param(Any(), False, id="Any-no expressions never matches"),
+        pytest.param(
+            Any([Expression(), Condition("test", operator.eq, "value")]),
+            True,
+            id="Any-some match",
+        ),
+        pytest.param(
+            Any(
+                [
+                    Condition("test", operator.eq, "value"),
+                    Condition("other", operator.eq, "value"),
+                ]
+            ),
+            False,
+            id="Any-none match",
+        ),
+        # Not
+        pytest.param(Not(Expression()), False, id="Not-invert positive match"),
+        pytest.param(
+            Not(Not(Expression())), True, id="Not-double negative is positive match"
+ ), + ], +) def test_match(expression, candidate, expected): - '''Determine if candidate matches expression.''' + """Determine if candidate matches expression.""" assert expression.match(candidate) is expected def parametrize_test_condition_match(metafunc): - '''Parametrize condition_match tests.''' + """Parametrize condition_match tests.""" identifiers = [] data = [] matrix = { # Operator, match, no match operator.eq: { - 'match': 10, 'no-match': 20, - 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' + "match": 10, + "no-match": 20, + "wildcard-match": "valu*", + "wildcard-no-match": "values*", }, - operator.ne: {'match': 20, 'no-match': 10}, - operator.ge: {'match': 10, 'no-match': 20}, - operator.le: {'match': 10, 'no-match': 0}, - operator.gt: {'match': 0, 'no-match': 10}, - operator.lt: {'match': 20, 'no-match': 10} + operator.ne: {"match": 20, "no-match": 10}, + operator.ge: {"match": 10, "no-match": 20}, + operator.le: {"match": 10, "no-match": 0}, + operator.gt: {"match": 0, "no-match": 10}, + operator.lt: {"match": 20, "no-match": 10}, } for operator_function, values in matrix.items(): for value_label, value in values.items(): - if value_label.startswith('wildcard'): - key_options = { - 'plain': 'name', - 'nested': 'change.name' - } + if value_label.startswith("wildcard"): + key_options = {"plain": "name", "nested": "change.name"} else: - key_options = { - 'plain': 'id', - 'nested': 'change.new_value' - } + key_options = {"plain": "id", "nested": "change.new_value"} for key_label, key in key_options.items(): - identifier = '{} operator {} key {}'.format( + identifier = "{} operator {} key {}".format( operator_function.__name__, key_label, value_label ) - data.append(pytest.param( - key, operator_function, value, - 'no-match' not in value_label, id=identifier - )) + data.append( + pytest.param( + key, + operator_function, + value, + "no-match" not in value_label, + id=identifier, + ) + ) - metafunc.parametrize( - 'key, operator, value, expected', data - ) + metafunc.parametrize("key, operator, value, expected", data) def test_condition_match(key, operator, value, candidate, expected): - '''Determine if candidate matches condition expression.''' + """Determine if candidate matches condition expression.""" condition = Condition(key, operator, value) assert condition.match(candidate) is expected diff --git a/test/unit/event/test_hub.py b/test/unit/event/test_hub.py index df0fecef..2c49c390 100644 --- a/test/unit/event/test_hub.py +++ b/test/unit/event/test_hub.py @@ -21,56 +21,56 @@ class MockClass(object): - '''Mock class for testing.''' + """Mock class for testing.""" def method(self): - '''Mock method for testing.''' + """Mock method for testing.""" def mockFunction(): - '''Mock function for testing.''' + """Mock function for testing.""" class MockConnection(object): - '''Mock connection for testing.''' + """Mock connection for testing.""" @property def connected(self): - '''Return whether connected.''' + """Return whether connected.""" return True def close(self): - '''Close mock connection.''' + """Close mock connection.""" pass def assert_callbacks(hub, callbacks): - '''Assert hub has exactly *callbacks* subscribed.''' + """Assert hub has exactly *callbacks* subscribed.""" # Subscribers always starts with internal handle_reply subscriber. 
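# A minimal sketch of the convention this helper relies on, assuming a
# reachable ftrack server; the server url, credentials and topic name are
# hypothetical. The hub registers its own reply handler when it is
# constructed, so user callbacks only appear from index 1 onwards.
import ftrack_api.event.hub

hub = ftrack_api.event.hub.EventHub(
    "https://example.ftrackapp.com", "user", "key"
)

def on_event(event):
    """Do nothing; only registration order matters here."""

hub.subscribe("topic=example-topic", on_event)

# Index 0 is the hub's internal handle_reply subscriber; ours follows it.
assert hub._subscribers[1].callback is on_event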
subscribers = hub._subscribers[:] subscribers.pop(0) if len(subscribers) != len(callbacks): raise AssertionError( - 'Number of subscribers ({0}) != number of callbacks ({1})' - .format(len(subscribers), len(callbacks)) + "Number of subscribers ({0}) != number of callbacks ({1})".format( + len(subscribers), len(callbacks) + ) ) for index, subscriber in enumerate(subscribers): if subscriber.callback != callbacks[index]: raise AssertionError( - 'Callback at {0} != subscriber callback at same index.' - .format(index) + "Callback at {0} != subscriber callback at same index.".format(index) ) @pytest.fixture() def event_hub(request, session): - '''Return event hub to test against. + """Return event hub to test against. Hub is automatically connected at start of test and disconnected at end. - ''' + """ hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -78,7 +78,7 @@ def event_hub(request, session): hub.connect() def cleanup(): - '''Cleanup.''' + """Cleanup.""" if hub.connected: hub.disconnect() @@ -87,44 +87,59 @@ def cleanup(): return hub -@pytest.mark.parametrize('server_url, expected', [ - pytest.param('https://test.ftrackapp.com', 'https://test.ftrackapp.com', id='with port'), - pytest.param('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000', id='without port') -]) +@pytest.mark.parametrize( + "server_url, expected", + [ + pytest.param( + "https://test.ftrackapp.com", "https://test.ftrackapp.com", id="with port" + ), + pytest.param( + "https://test.ftrackapp.com:9000", + "https://test.ftrackapp.com:9000", + id="without port", + ), + ], +) def test_get_server_url(server_url, expected): - '''Return server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) + """Return server url.""" + event_hub = ftrack_api.event.hub.EventHub(server_url, "user", "key") assert event_hub.get_server_url() == expected -@pytest.mark.parametrize('server_url, expected', [ - pytest.param('https://test.ftrackapp.com', 'test.ftrackapp.com', id='with port'), - pytest.param('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000', id='without port') -]) +@pytest.mark.parametrize( + "server_url, expected", + [ + pytest.param( + "https://test.ftrackapp.com", "test.ftrackapp.com", id="with port" + ), + pytest.param( + "https://test.ftrackapp.com:9000", + "test.ftrackapp.com:9000", + id="without port", + ), + ], +) def test_get_network_location(server_url, expected): - '''Return network location of server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) + """Return network location of server url.""" + event_hub = ftrack_api.event.hub.EventHub(server_url, "user", "key") assert event_hub.get_network_location() == expected -@pytest.mark.parametrize('server_url, expected', [ - pytest.param('https://test.ftrackapp.com', True, id='secure'), - pytest.param('http://test.ftrackapp.com', False, id='not secure') -]) +@pytest.mark.parametrize( + "server_url, expected", + [ + pytest.param("https://test.ftrackapp.com", True, id="secure"), + pytest.param("http://test.ftrackapp.com", False, id="not secure"), + ], +) def test_secure_property(server_url, expected, mocker): - '''Return whether secure connection used.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) + """Return whether secure connection used.""" + event_hub = ftrack_api.event.hub.EventHub(server_url, "user", "key") assert event_hub.secure is expected def test_connected_property(session): - '''Return connected 
state.''' + """Return connected state.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -137,28 +152,33 @@ def test_connected_property(session): assert event_hub.connected is False -@pytest.mark.parametrize('server_url, expected', [ - pytest.param('https://test.ftrackapp.com', 'https://test.ftrackapp.com', id='with port'), - pytest.param('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000', id='without port'), - pytest.param('test.ftrackapp.com', ValueError, id='missing scheme'), - pytest.param('https://:9000', ValueError, id='missing hostname'), -]) +@pytest.mark.parametrize( + "server_url, expected", + [ + pytest.param( + "https://test.ftrackapp.com", "https://test.ftrackapp.com", id="with port" + ), + pytest.param( + "https://test.ftrackapp.com:9000", + "https://test.ftrackapp.com:9000", + id="without port", + ), + pytest.param("test.ftrackapp.com", ValueError, id="missing scheme"), + pytest.param("https://:9000", ValueError, id="missing hostname"), + ], +) def test_initialise_against_server_url(server_url, expected): - '''Initialise against server url.''' + """Initialise against server url.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): - ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) + ftrack_api.event.hub.EventHub(server_url, "user", "key") else: - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) + event_hub = ftrack_api.event.hub.EventHub(server_url, "user", "key") assert event_hub.get_server_url() == expected def test_connect(session): - '''Connect.''' + """Connect.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -169,89 +189,81 @@ def test_connect(session): def test_connect_custom_headers(session): - '''Connect with custom headers passed in.''' + """Connect with custom headers passed in.""" event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key, headers={'abc': 'def'} + session.server_url, session.api_user, session.api_key, headers={"abc": "def"} ) event_hub.connect() assert ( - 'abc' in event_hub._headers.keys(), - event_hub._headers['abc'] == 'def', - event_hub.connected is True + "abc" in event_hub._headers.keys(), + event_hub._headers["abc"] == "def", + event_hub.connected is True, ) event_hub.disconnect() @pytest.mark.parametrize( - 'headers', [ - ( - requests.structures.CaseInsensitiveDict( - {'ftrack-strict-api': 'true'} - ) - ), - ( - {'ftrack-strict-api': 'true'} - ) - ] + "headers", + [ + (requests.structures.CaseInsensitiveDict({"ftrack-strict-api": "true"})), + ({"ftrack-strict-api": "true"}), + ], ) def test_connect_strict_api_header(session, headers): - '''Connect with ftrack-strict-api = True header passed in.''' + """Connect with ftrack-strict-api = True header passed in.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key, headers=headers ) event_hub.connect() assert ( - 'ftrack-strict-api' in event_hub._headers.keys(), + "ftrack-strict-api" in event_hub._headers.keys(), isinstance(event_hub._headers, dict), - event_hub._headers['ftrack-strict-api'] is True, - event_hub.connected is True + event_hub._headers["ftrack-strict-api"] is True, + event_hub.connected is True, ) event_hub.disconnect() def test_connect_custom_cookies(session): - '''Connect with custom cookies passed in.''' + """Connect with custom cookies passed in.""" event_hub = 
ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key, cookies={'abc': 'def'} + session.server_url, session.api_user, session.api_key, cookies={"abc": "def"} ) event_hub.connect() - assert ( - event_hub._cookies == 'abc=def', - event_hub.connected is True - ) + assert (event_hub._cookies == "abc=def", event_hub.connected is True) event_hub.disconnect() def test_connect_when_already_connected(event_hub): - '''Fail to connect when already connected''' + """Fail to connect when already connected""" assert event_hub.connected is True with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: event_hub.connect() - assert 'Already connected' in str(error) + assert "Already connected" in str(error) def test_connect_failure(session, mocker): - '''Fail to connect to server.''' + """Fail to connect to server.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') + """Force connection failure.""" + raise Exception("Forced fail.") - mocker.patch('websocket.create_connection', force_fail) + mocker.patch("websocket.create_connection", force_fail) with pytest.raises(ftrack_api.exception.EventHubConnectionError): event_hub.connect() def test_connect_missing_required_transport(session, mocker, caplog): - '''Fail to connect to server that does not provide correct transport.''' + """Fail to connect to server that does not provide correct transport.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -259,26 +271,22 @@ def test_connect_missing_required_transport(session, mocker, caplog): original_get_socket_io_session = event_hub._get_socket_io_session def _get_socket_io_session(): - '''Patched to return no transports.''' + """Patched to return no transports.""" session = original_get_socket_io_session() - return ftrack_api.event.hub.SocketIoSession( - session[0], session[1], [] - ) + return ftrack_api.event.hub.SocketIoSession(session[0], session[1], []) - mocker.patch.object( - event_hub, '_get_socket_io_session', _get_socket_io_session - ) + mocker.patch.object(event_hub, "_get_socket_io_session", _get_socket_io_session) - with caplog.at_level(logging.DEBUG) as log_ctx, pytest.raises(ftrack_api.exception.EventHubConnectionError) as exception_ctx: + with caplog.at_level(logging.DEBUG) as log_ctx, pytest.raises( + ftrack_api.exception.EventHubConnectionError + ) as exception_ctx: event_hub.connect() - - assert ( - 'Server does not support websocket sessions.' in str(caplog.text) - ) + + assert "Server does not support websocket sessions." 
in str(caplog.text) def test_disconnect(event_hub): - '''Disconnect and unsubscribe all subscribers.''' + """Disconnect and unsubscribe all subscribers.""" event_hub.disconnect() assert len(event_hub._subscribers) == 0 @@ -289,7 +297,7 @@ def test_disconnect(event_hub): def test_disconnect_without_unsubscribing(event_hub): - '''Disconnect without unsubscribing all subscribers.''' + """Disconnect without unsubscribing all subscribers.""" event_hub.disconnect(unsubscribe=False) assert len(event_hub._subscribers) > 0 assert event_hub.connected is False @@ -297,7 +305,7 @@ def test_disconnect_without_unsubscribing(event_hub): def test_disconnect_with_reconnect(event_hub): - '''Disconnect with the intention of reconnecting.''' + """Disconnect with the intention of reconnecting.""" event_hub.disconnect(reconnect=True) assert event_hub._connection_initialised is True @@ -305,32 +313,32 @@ def test_disconnect_with_reconnect(event_hub): def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): - '''Close connection from manually connected hub.''' + """Close connection from manually connected hub.""" session_no_autoconnect_hub.event_hub.connect() session_no_autoconnect_hub.close() assert session_no_autoconnect_hub.event_hub.connected is False def test_disconnect_when_not_connected(session): - '''Fail to disconnect when not connected''' + """Fail to disconnect when not connected""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: event_hub.disconnect() - assert 'Not currently connected' in str(error) + assert "Not currently connected" in str(error) def test_reconnect(event_hub): - '''Reconnect successfully.''' + """Reconnect successfully.""" assert event_hub.connected is True event_hub.reconnect() assert event_hub.connected is True def test_reconnect_when_not_connected(session): - '''Reconnect successfully even if not already connected.''' + """Reconnect successfully even if not already connected.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -343,7 +351,7 @@ def test_reconnect_when_not_connected(session): def test_fail_to_reconnect(session, mocker): - '''Fail to reconnect.''' + """Fail to reconnect.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) @@ -351,45 +359,45 @@ def test_fail_to_reconnect(session, mocker): assert event_hub.connected is True def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') + """Force connection failure.""" + raise Exception("Forced fail.") - mocker.patch('websocket.create_connection', force_fail) + mocker.patch("websocket.create_connection", force_fail) attempts = 2 with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: event_hub.reconnect(attempts=attempts, delay=0.5) - assert 'Failed to reconnect to event server' in str(error) - assert 'after {} attempts'.format(attempts) in str(error) + assert "Failed to reconnect to event server" in str(error) + assert "after {} attempts".format(attempts) in str(error) def test_wait(event_hub): - '''Wait for event and handle as they arrive.''' - called = {'callback': False} + """Wait for event and handle as they arrive.""" + called = {"callback": False} def callback(event): - called['callback'] = True + called["callback"] = True - event_hub.subscribe('topic=test-subscribe', callback) + 
event_hub.subscribe("topic=test-subscribe", callback) - event_hub.publish(Event(topic='test-subscribe')) + event_hub.publish(Event(topic="test-subscribe")) # Until wait, the event should not have been processed even if received. time.sleep(1) - assert called == {'callback': False} + assert called == {"callback": False} event_hub.wait(2) - assert called == {'callback': True} + assert called == {"callback": True} def test_wait_interrupted_by_disconnect(event_hub): - '''Interrupt wait loop with disconnect event.''' + """Interrupt wait loop with disconnect event.""" wait_time = 5 start = time.time() # Inject event directly for test purposes. - event = Event(topic='ftrack.meta.disconnected') + event = Event(topic="ftrack.meta.disconnected") event_hub._event_queue.put(event) event_hub.wait(wait_time) @@ -397,97 +405,98 @@ def test_wait_interrupted_by_disconnect(event_hub): assert time.time() - start < wait_time -@pytest.mark.parametrize('identifier, registered', [ - pytest.param('registered-test-subscriber', True, id='registered'), - pytest.param('unregistered-test-subscriber', False, id='missing') -]) +@pytest.mark.parametrize( + "identifier, registered", + [ + pytest.param("registered-test-subscriber", True, id="registered"), + pytest.param("unregistered-test-subscriber", False, id="missing"), + ], +) def test_get_subscriber_by_identifier(event_hub, identifier, registered): - '''Return subscriber by identifier.''' + """Return subscriber by identifier.""" + def callback(event): pass - subscriber = { - 'id': 'registered-test-subscriber' - } + subscriber = {"id": "registered-test-subscriber"} - event_hub.subscribe('topic=test-subscribe', callback, subscriber) + event_hub.subscribe("topic=test-subscribe", callback, subscriber) retrieved = event_hub.get_subscriber_by_identifier(identifier) if registered: assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) - assert retrieved.metadata.get('id') == subscriber['id'] + assert retrieved.metadata.get("id") == subscriber["id"] else: assert retrieved is None def test_subscribe(event_hub): - '''Subscribe to topics.''' - called = {'a': False, 'b': False} + """Subscribe to topics.""" + called = {"a": False, "b": False} def callback_a(event): - called['a'] = True + called["a"] = True def callback_b(event): - called['b'] = True + called["b"] = True - event_hub.subscribe('topic=test-subscribe', callback_a) - event_hub.subscribe('topic=test-subscribe-other', callback_b) + event_hub.subscribe("topic=test-subscribe", callback_a) + event_hub.subscribe("topic=test-subscribe-other", callback_b) - event_hub.publish(Event(topic='test-subscribe')) + event_hub.publish(Event(topic="test-subscribe")) event_hub.wait(2) - assert called == {'a': True, 'b': False} + assert called == {"a": True, "b": False} def test_subscribe_before_connected(session): - '''Subscribe to topic before connected.''' + """Subscribe to topic before connected.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) - called = {'callback': False} + called = {"callback": False} def callback(event): - called['callback'] = True + called["callback"] = True - identifier = 'test-subscriber' - event_hub.subscribe( - 'topic=test-subscribe', callback, subscriber={'id': identifier} - ) + identifier = "test-subscriber" + event_hub.subscribe("topic=test-subscribe", callback, subscriber={"id": identifier}) assert event_hub.get_subscriber_by_identifier(identifier) is not None event_hub.connect() try: - event_hub.publish(Event(topic='test-subscribe')) + 
event_hub.publish(Event(topic="test-subscribe")) event_hub.wait(2) finally: event_hub.disconnect() - assert called == {'callback': True} + assert called == {"callback": True} def test_duplicate_subscriber(event_hub): - '''Fail to subscribe same subscriber more than once.''' - subscriber = {'id': 'test-subscriber'} - event_hub.subscribe('topic=test', None, subscriber=subscriber) + """Fail to subscribe same subscriber more than once.""" + subscriber = {"id": "test-subscriber"} + event_hub.subscribe("topic=test", None, subscriber=subscriber) with pytest.raises(ftrack_api.exception.NotUniqueError) as error: - event_hub.subscribe('topic=test', None, subscriber=subscriber) + event_hub.subscribe("topic=test", None, subscriber=subscriber) - assert '{0} already exists'.format(subscriber['id']) in str(error) + assert "{0} already exists".format(subscriber["id"]) in str(error) def test_unsubscribe(event_hub): - '''Unsubscribe a specific callback.''' + """Unsubscribe a specific callback.""" + def callback_a(event): pass def callback_b(event): pass - identifier_a = event_hub.subscribe('topic=test', callback_a) - identifier_b = event_hub.subscribe('topic=test', callback_b) + identifier_a = event_hub.subscribe("topic=test", callback_a) + identifier_b = event_hub.subscribe("topic=test", callback_b) assert_callbacks(event_hub, [callback_a, callback_b]) @@ -502,8 +511,8 @@ def callback_b(event): @flaky(max_runs=2, min_passes=1) def test_unsubscribe_whilst_disconnected(event_hub): - '''Unsubscribe whilst disconnected.''' - identifier = event_hub.subscribe('topic=test', None) + """Unsubscribe whilst disconnected.""" + identifier = event_hub.subscribe("topic=test", None) event_hub.disconnect(unsubscribe=False) event_hub.unsubscribe(identifier) @@ -511,228 +520,229 @@ def test_unsubscribe_whilst_disconnected(event_hub): def test_unsubscribe_missing_subscriber(event_hub): - '''Fail to unsubscribe a non-subscribed subscriber.''' - identifier = 'non-subscribed-subscriber' + """Fail to unsubscribe a non-subscribed subscriber.""" + identifier = "non-subscribed-subscriber" with pytest.raises(ftrack_api.exception.NotFoundError) as error: event_hub.unsubscribe(identifier) - assert ( - 'missing subscriber with identifier {}'.format(identifier) - in str(error) - ) + assert "missing subscriber with identifier {}".format(identifier) in str(error) -@pytest.mark.parametrize('event_data', [ - pytest.param(dict(source=dict(id='1', user=dict(username='auto'))), id='pre-prepared'), - pytest.param(dict(source=dict(user=dict(username='auto'))), id='missing id'), - pytest.param(dict(source=dict(id='1')), id='missing user'), - pytest.param(dict(), id='no source') -]) +@pytest.mark.parametrize( + "event_data", + [ + pytest.param( + dict(source=dict(id="1", user=dict(username="auto"))), id="pre-prepared" + ), + pytest.param(dict(source=dict(user=dict(username="auto"))), id="missing id"), + pytest.param(dict(source=dict(id="1")), id="missing user"), + pytest.param(dict(), id="no source"), + ], +) def test_prepare_event(session, event_data): - '''Prepare event.''' + """Prepare event.""" # Replace username `auto` in event data with API user. 
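# A short sketch of the substitution performed below, with hypothetical topic
# and username values: the parametrized payloads are plain dicts that are
# splatted into Event, and Event itself behaves like a mutable mapping over
# that payload, which is what the equality check at the end depends on.
from ftrack_api.event.base import Event

event = Event("example-topic", source=dict(user=dict(username="auto")))
assert event["source"]["user"]["username"] == "auto"

event["source"]["user"]["username"] = "jane.doe"
assert event["source"]["user"]["username"] == "jane.doe"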
try: - if event_data['source']['user']['username'] == 'auto': - event_data['source']['user']['username'] = session.api_user + if event_data["source"]["user"]["username"] == "auto": + event_data["source"]["user"]["username"] = session.api_user except KeyError: pass event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) - event_hub.id = '1' + event_hub.id = "1" - event = Event('test', id='event-id', **event_data) + event = Event("test", id="event-id", **event_data) expected = Event( - 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) + "test", id="event-id", source=dict(id="1", user=dict(username=session.api_user)) ) event_hub._prepare_event(event) assert event == expected def test_prepare_reply_event(session): - '''Prepare reply event.''' + """Prepare reply event.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) - source_event = Event('source', source=dict(id='source-id')) - reply_event = Event('reply') + source_event = Event("source", source=dict(id="source-id")) + reply_event = Event("reply") event_hub._prepare_reply_event(reply_event, source_event) - assert source_event['source']['id'] in reply_event['target'] - assert reply_event['in_reply_to_event'] == source_event['id'] + assert source_event["source"]["id"] in reply_event["target"] + assert reply_event["in_reply_to_event"] == source_event["id"] - event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) - assert reply_event['source'] == {'id': 'source'} + event_hub._prepare_reply_event(reply_event, source_event, {"id": "source"}) + assert reply_event["source"] == {"id": "source"} def test_publish(event_hub): - '''Publish asynchronous event.''' - called = {'callback': False} + """Publish asynchronous event.""" + called = {"callback": False} def callback(event): - called['callback'] = True + called["callback"] = True - event_hub.subscribe('topic=test-subscribe', callback) + event_hub.subscribe("topic=test-subscribe", callback) - event_hub.publish(Event(topic='test-subscribe')) + event_hub.publish(Event(topic="test-subscribe")) event_hub.wait(2) - assert called == {'callback': True} + assert called == {"callback": True} def test_publish_raising_error(event_hub): - '''Raise error, when configured, on failed publish.''' + """Raise error, when configured, on failed publish.""" # Note that the event hub currently only fails publish when not connected. # All other errors are inconsistently swallowed. event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) + event = Event(topic="a-topic", data=dict(status="fail")) with pytest.raises(Exception): - event_hub.publish(event, on_error='raise') + event_hub.publish(event, on_error="raise") def test_publish_ignoring_error(event_hub): - '''Ignore error, when configured, on failed publish.''' + """Ignore error, when configured, on failed publish.""" # Note that the event hub currently only fails publish when not connected. # All other errors are inconsistently swallowed. event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event, on_error='ignore') + event = Event(topic="a-topic", data=dict(status="fail")) + event_hub.publish(event, on_error="ignore") def test_publish_logs_other_errors(event_hub, caplog, mocker): - '''Log publish errors other than connection error.''' + """Log publish errors other than connection error.""" # Mock connection to force error. 
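# The patched-in connection only needs to look connected while being unable
# to actually send; a condensed sketch of that stand-in (hypothetical class
# name, mirroring the MockConnection defined at the top of this module):
class BrokenConnection(object):
    """Report as connected without providing a usable socket."""

    @property
    def connected(self):
        """Pretend the connection is alive."""
        return True

    def close(self):
        """Accept close calls without doing anything."""

# Usage then mirrors the test below, e.g.:
#     mocker.patch.object(event_hub, "_connection", BrokenConnection())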
- mocker.patch.object(event_hub, '_connection', MockConnection()) + mocker.patch.object(event_hub, "_connection", MockConnection()) - event = Event(topic='a-topic', data=dict(status='fail')) + event = Event(topic="a-topic", data=dict(status="fail")) event_hub.publish(event) - expected = 'Error sending event {0}.'.format(event) + expected = "Error sending event {0}.".format(event) - records = caplog.get_records(when='call') + records = caplog.get_records(when="call") messages = [record.getMessage().strip() for record in records] - assert expected in messages, 'Expected log message missing in output.' + assert expected in messages, "Expected log message missing in output." def test_synchronous_publish(event_hub): - '''Publish event synchronously and collect results.''' + """Publish event synchronously and collect results.""" + def callback_a(event): - return 'A' + return "A" def callback_b(event): - return 'B' + return "B" def callback_c(event): - return 'C' + return "C" - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) + event_hub.subscribe("topic=test", callback_a, priority=50) + event_hub.subscribe("topic=test", callback_b, priority=60) + event_hub.subscribe("topic=test", callback_c, priority=70) - results = event_hub.publish(Event(topic='test'), synchronous=True) - assert results == ['A', 'B', 'C'] + results = event_hub.publish(Event(topic="test"), synchronous=True) + assert results == ["A", "B", "C"] def test_publish_during_connect(session, mocker): - '''Test publishing while connection is initialising.''' + """Test publishing while connection is initialising.""" event_hub = ftrack_api.event.hub.EventHub( session.server_url, session.api_user, session.api_key ) def replier(event): - '''Replier.''' - return 'Replied' + """Replier.""" + return "Replied" - event_hub.subscribe('topic=test', replier) - called = {'callback': None} + event_hub.subscribe("topic=test", replier) + called = {"callback": None} def on_reply(event): - called['callback'] = event['data'] + called["callback"] = event["data"] # Mark the connection as initialised. 
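# A minimal sketch of the queueing behaviour exercised here, with
# hypothetical server url and credentials: once the connection is marked as
# initialising, published events are parked on the internal send queue and
# only flushed to the server when the socket comes up.
import ftrack_api.event.hub
from ftrack_api.event.base import Event

hub = ftrack_api.event.hub.EventHub(
    "https://example.ftrackapp.com", "user", "key"
)
hub.init_connection()
hub.publish(Event(topic="example-topic"))
assert hub._event_send_queue.qsize() == 1

hub.connect()  # Queued events are sent once connected.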
event_hub.init_connection() - event_hub.publish( - Event(topic='test'), on_reply=on_reply - ) + event_hub.publish(Event(topic="test"), on_reply=on_reply) assert event_hub._event_send_queue.qsize() == 1 event_hub.connect() event_hub.wait(2) - assert called['callback'] == 'Replied' + assert called["callback"] == "Replied" assert event_hub._event_send_queue.qsize() == 0 def test_publish_with_reply(event_hub): - '''Publish asynchronous event with on reply handler.''' + """Publish asynchronous event with on reply handler.""" def replier(event): - '''Replier.''' - return 'Replied' + """Replier.""" + return "Replied" - topic_name = 'test_{0}'.format(uuid.uuid4()) - event_hub.subscribe('topic={0}'.format(topic_name), replier) + topic_name = "test_{0}".format(uuid.uuid4()) + event_hub.subscribe("topic={0}".format(topic_name), replier) - called = {'callback': None} + called = {"callback": None} def on_reply(event): - called['callback'] = event['data'] + called["callback"] = event["data"] event_hub.publish(Event(topic=topic_name), on_reply=on_reply) event_hub.wait(2) - assert called['callback'] == 'Replied' + assert called["callback"] == "Replied" def test_publish_with_multiple_replies(event_hub): - '''Publish asynchronous event and retrieve multiple replies.''' + """Publish asynchronous event and retrieve multiple replies.""" def replier_one(event): - '''Replier.''' - return 'One' + """Replier.""" + return "One" def replier_two(event): - '''Replier.''' - return 'Two' + """Replier.""" + return "Two" - topic_name = 'test_{0}'.format(uuid.uuid4()) - event_hub.subscribe('topic={0}'.format(topic_name), replier_one) - event_hub.subscribe('topic={0}'.format(topic_name), replier_two) + topic_name = "test_{0}".format(uuid.uuid4()) + event_hub.subscribe("topic={0}".format(topic_name), replier_one) + event_hub.subscribe("topic={0}".format(topic_name), replier_two) - called = {'callback': []} + called = {"callback": []} def on_reply(event): - called['callback'].append(event['data']) + called["callback"].append(event["data"]) event_hub.publish(Event(topic=topic_name), on_reply=on_reply) event_hub.wait(2) - assert sorted(called['callback']) == ['One', 'Two'] + assert sorted(called["callback"]) == ["One", "Two"] def test_server_heartbeat_response(): - '''Maintain connection by responding to server heartbeat request.''' + """Maintain connection by responding to server heartbeat request.""" test_script = os.path.join( - os.path.dirname(__file__), 'event_hub_server_heartbeat.py' + os.path.dirname(__file__), "event_hub_server_heartbeat.py" ) # set the topic name to something unique - topic = 'test_event_hub_server_heartbeat_{0}'.format(uuid.uuid4()) + topic = "test_event_hub_server_heartbeat_{0}".format(uuid.uuid4()) # Start subscriber that will listen for all three messages. - subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe', topic]) + subscriber = subprocess.Popen([sys.executable, test_script, "subscribe", topic]) # Give subscriber time to connect to server. time.sleep(10) # Start publisher to publish three messages. 
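# The heartbeat helper is driven purely through its command line arguments
# (mode and topic, as parsed in event_hub_server_heartbeat.py); a sketch of
# pairing the two roles by hand, with a hypothetical topic name:
import subprocess
import sys

subscriber = subprocess.Popen(
    [sys.executable, "event_hub_server_heartbeat.py", "subscribe", "my-topic"]
)
publisher = subprocess.Popen(
    [sys.executable, "event_hub_server_heartbeat.py", "publish", "my-topic"]
)
publisher.wait()
subscriber.wait()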
-    publisher = subprocess.Popen([sys.executable, test_script, 'publish', topic])
+    publisher = subprocess.Popen([sys.executable, test_script, "publish", topic])
     publisher.wait()
     subscriber.wait()
@@ -741,53 +751,39 @@ def test_server_heartbeat_response():
 
 
 def test_stop_event(event_hub):
-    '''Stop processing of subsequent local handlers when stop flag set.'''
-    called = {
-        'a': False,
-        'b': False,
-        'c': False
-    }
+    """Stop processing of subsequent local handlers when stop flag set."""
+    called = {"a": False, "b": False, "c": False}
 
     def callback_a(event):
-        called['a'] = True
+        called["a"] = True
 
     def callback_b(event):
-        called['b'] = True
+        called["b"] = True
         event.stop()
 
     def callback_c(event):
-        called['c'] = True
+        called["c"] = True
 
-    event_hub.subscribe('topic=test', callback_a, priority=50)
-    event_hub.subscribe('topic=test', callback_b, priority=60)
-    event_hub.subscribe('topic=test', callback_c, priority=70)
+    event_hub.subscribe("topic=test", callback_a, priority=50)
+    event_hub.subscribe("topic=test", callback_b, priority=60)
+    event_hub.subscribe("topic=test", callback_c, priority=70)
 
-    event_hub.publish(Event(topic='test'))
+    event_hub.publish(Event(topic="test"))
     event_hub.wait(2)
 
-    assert called == {
-        'a': True,
-        'b': True,
-        'c': False
-    }
+    assert called == {"a": True, "b": True, "c": False}
 
 
 def test_encode(session):
-    '''Encode event data.'''
-    encoded = session.event_hub._encode(
-        dict(name='ftrack.event', args=[Event('test')])
-    )
-    assert 'inReplyToEvent' in encoded
-    assert 'in_reply_to_event' not in encoded
+    """Encode event data."""
+    encoded = session.event_hub._encode(dict(name="ftrack.event", args=[Event("test")]))
+    assert "inReplyToEvent" in encoded
+    assert "in_reply_to_event" not in encoded
 
 
 def test_decode(session):
-    '''Decode event data.'''
-    decoded = session.event_hub._decode(
-        json.dumps({
-            'inReplyToEvent': 'id'
-        })
-    )
+    """Decode event data."""
+    decoded = session.event_hub._decode(json.dumps({"inReplyToEvent": "id"}))
 
-    assert 'in_reply_to_event' in decoded
-    assert 'inReplyToEvent' not in decoded
+    assert "in_reply_to_event" in decoded
+    assert "inReplyToEvent" not in decoded
diff --git a/test/unit/event/test_subscriber.py b/test/unit/event/test_subscriber.py
index 5f08b5c0..53039964 100644
--- a/test/unit/event/test_subscriber.py
+++ b/test/unit/event/test_subscriber.py
@@ -8,23 +8,28 @@
 
 
 def test_string_representation():
-    '''String representation.'''
+    """String representation."""
     subscriber = ftrack_api.event.subscriber.Subscriber(
-        'topic=test', lambda x: None, {'meta': 'info'}, 100
+        "topic=test", lambda x: None, {"meta": "info"}, 100
     )
 
     assert str(subscriber) == (
-        '<Subscriber metadata={\'meta\': \'info\'} subscription="topic=test">'
+        "<Subscriber metadata={'meta': 'info'} subscription=\"topic=test\">"
     )
 
 
-@pytest.mark.parametrize('expression, event, expected', [
-    pytest.param('topic=test', Event(topic='test'), True, id='interested'),
-    pytest.param('topic=test', Event(topic='other-test'), False, id='not interested')
-])
+@pytest.mark.parametrize(
+    "expression, event, expected",
+    [
+        pytest.param("topic=test", Event(topic="test"), True, id="interested"),
+        pytest.param(
+            "topic=test", Event(topic="other-test"), False, id="not interested"
+        ),
+    ],
+)
 def test_interested_in(expression, event, expected):
-    '''Determine if subscriber interested in event.'''
+    """Determine if subscriber interested in event."""
     subscriber = ftrack_api.event.subscriber.Subscriber(
-        expression, lambda x: None, {'meta': 'info'}, 100
+        expression, lambda x: None, {"meta": "info"}, 100
     )
 
     assert subscriber.interested_in(event) is expected
diff --git a/test/unit/event/test_subscription.py
b/test/unit/event/test_subscription.py index 9a1d12db..872baf8d 100644 --- a/test/unit/event/test_subscription.py +++ b/test/unit/event/test_subscription.py @@ -8,18 +8,21 @@ def test_string_representation(): - '''String representation is subscription expression.''' - expression = 'topic=some-topic' + """String representation is subscription expression.""" + expression = "topic=some-topic" subscription = ftrack_api.event.subscription.Subscription(expression) assert str(subscription) == expression -@pytest.mark.parametrize('expression, event, expected', [ - pytest.param('topic=test', Event(topic='test'), True,id='match'), - pytest.param('topic=test', Event(topic='other-test'), False, id='no match') -]) +@pytest.mark.parametrize( + "expression, event, expected", + [ + pytest.param("topic=test", Event(topic="test"), True, id="match"), + pytest.param("topic=test", Event(topic="other-test"), False, id="no match"), + ], +) def test_includes(expression, event, expected): - '''Subscription includes event.''' + """Subscription includes event.""" subscription = ftrack_api.event.subscription.Subscription(expression) assert subscription.includes(event) is expected diff --git a/test/unit/resource_identifier_transformer/test_base.py b/test/unit/resource_identifier_transformer/test_base.py index fee80454..a5c36536 100644 --- a/test/unit/resource_identifier_transformer/test_base.py +++ b/test/unit/resource_identifier_transformer/test_base.py @@ -8,23 +8,39 @@ @pytest.fixture() def transformer(session): - '''Return instance of ResourceIdentifierTransformer.''' + """Return instance of ResourceIdentifierTransformer.""" return _transformer.ResourceIdentifierTransformer(session) -@pytest.mark.parametrize('resource_identifier, context, expected', [ - pytest.param('identifier', None, 'identifier', id='no context'), - pytest.param('identifier', {'user': {'username': 'user'}}, 'identifier', id='basic context') -]) +@pytest.mark.parametrize( + "resource_identifier, context, expected", + [ + pytest.param("identifier", None, "identifier", id="no context"), + pytest.param( + "identifier", + {"user": {"username": "user"}}, + "identifier", + id="basic context", + ), + ], +) def test_encode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' + """Encode resource identifier.""" assert transformer.encode(resource_identifier, context) == expected -@pytest.mark.parametrize('resource_identifier, context, expected', [ - pytest.param('identifier', None, 'identifier',id='no context'), - pytest.param('identifier', {'user': {'username': 'user'}}, 'identifier', id='basic context') -]) +@pytest.mark.parametrize( + "resource_identifier, context, expected", + [ + pytest.param("identifier", None, "identifier", id="no context"), + pytest.param( + "identifier", + {"user": {"username": "user"}}, + "identifier", + id="basic context", + ), + ], +) def test_decode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' + """Encode resource identifier.""" assert transformer.decode(resource_identifier, context) == expected diff --git a/test/unit/structure/test_base.py b/test/unit/structure/test_base.py index 98cbeee1..a257e4ee 100644 --- a/test/unit/structure/test_base.py +++ b/test/unit/structure/test_base.py @@ -7,22 +7,25 @@ class Concrete(ftrack_api.structure.base.Structure): - '''Concrete implementation to allow testing non-abstract methods.''' + """Concrete implementation to allow testing non-abstract methods.""" def get_resource_identifier(self, entity, 
context=None): - '''Return a resource identifier for supplied *entity*. + """Return a resource identifier for supplied *entity*. *context* can be a mapping that supplies additional information. - ''' - return 'resource_identifier' + """ + return "resource_identifier" -@pytest.mark.parametrize('sequence, expected', [ - pytest.param({'padding': None}, '%d', id='no padding'), - pytest.param({'padding': 4}, '%04d', id='padded') -],) +@pytest.mark.parametrize( + "sequence, expected", + [ + pytest.param({"padding": None}, "%d", id="no padding"), + pytest.param({"padding": 4}, "%04d", id="padded"), + ], +) def test_get_sequence_expression(sequence, expected): - '''Get sequence expression from sequence.''' + """Get sequence expression from sequence.""" structure = Concrete() assert structure._get_sequence_expression(sequence) == expected diff --git a/test/unit/structure/test_entity_id.py b/test/unit/structure/test_entity_id.py index 78ec9305..17b5d421 100644 --- a/test/unit/structure/test_entity_id.py +++ b/test/unit/structure/test_entity_id.py @@ -10,9 +10,9 @@ import ftrack_api.structure.entity_id -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def structure(): - '''Return structure.''' + """Return structure.""" return ftrack_api.structure.entity_id.EntityIdStructure() @@ -20,25 +20,37 @@ def structure(): # called functions here can change to standard fixtures. # https://github.com/pytest-dev/pytest/issues/579 + def valid_entity(): - '''Return valid entity.''' + """Return valid entity.""" session = ftrack_api.Session() - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': 'file_component', - 'file_type': '.png' - }) + entity = session.create( + "FileComponent", + { + "id": "f6cd40cb-d1c0-469f-a2d5-10369be8a724", + "name": "file_component", + "file_type": ".png", + }, + ) return entity -@pytest.mark.parametrize('entity, context, expected', [ - pytest.param(valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', id='valid-entity'), - pytest.param(mock.Mock(), {}, Exception, id='non-entity') -]) +@pytest.mark.parametrize( + "entity, context, expected", + [ + pytest.param( + valid_entity(), + {}, + "f6cd40cb-d1c0-469f-a2d5-10369be8a724", + id="valid-entity", + ), + pytest.param(mock.Mock(), {}, Exception, id="non-entity"), + ], +) def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' + """Get resource identifier.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): structure.get_resource_identifier(entity, context) diff --git a/test/unit/structure/test_get_file_from_string_event.py b/test/unit/structure/test_get_file_from_string_event.py index 76ffd59f..23d541a5 100644 --- a/test/unit/structure/test_get_file_from_string_event.py +++ b/test/unit/structure/test_get_file_from_string_event.py @@ -10,96 +10,102 @@ import ftrack_api.structure.id -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def structure(): - '''Return structure.''' - return ftrack_api.structure.id.IdStructure(prefix='another_path') + """Return structure.""" + return ftrack_api.structure.id.IdStructure(prefix="another_path") def file_compound_extension_no_component_event(component_file=None): - - ''' + """ Return file component with compound extension through **ftrack.api.session.get-file-type-from-string** event. 
- ''' + """ session = ftrack_api.Session() - entity = session.create_component( - component_file - ) + entity = session.create_component(component_file) return entity def file_compound_extension_component_event(component_file=None): - - ''' + """ Return file component with compound extension through **ftrack.api.session.get-file-type-from-string** event. - ''' + """ plugin_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..','..','fixture', 'plugin') + os.path.join(os.path.dirname(__file__), "..", "..", "fixture", "plugin") ) session = ftrack_api.Session(plugin_paths=[plugin_path]) - entity = session.create_component( - component_file - ) + entity = session.create_component(component_file) return entity -@pytest.mark.parametrize('entity, context, expected', [ - pytest.param( - file_compound_extension_component_event('mytest.foo.bar'), {}, - '.foo.bar', - id='file-compound-extension-component-event' - ), - pytest.param( - file_compound_extension_component_event('mytest.%4d.foo.bar'), {}, - '.foo.bar', - id='file-sequence-compound-extension-component-event' - ), - pytest.param( - file_compound_extension_component_event('mytest'), {}, - '', - id='no-file-compound-extension-component-event' - ), - pytest.param( - file_compound_extension_no_component_event('mytest.foo.bar'), {}, - '.bar', - id='file-compound-extension-no-component-event' - ), - pytest.param( - file_compound_extension_no_component_event('mytest.%4d.foo.bar'), {}, - '.bar', - id='file-sequence-compound-extension-no-component-event' - ), - pytest.param( - file_compound_extension_no_component_event('mytest'), {}, - '', - id='no-file-compound-extension-no-component-event' - ), - pytest.param( - file_compound_extension_component_event('%04d.bgeo.sc [1-10]'), {}, - '.bgeo.sc', - id='file-sequence-compound-extension-component-event-valid-clique' - ), - pytest.param( - file_compound_extension_component_event('argh.%04d.bgeo.sc [1-10]'), {}, - '.bgeo.sc', - id='file-sequence-compound-extension-component-event-valid-clique-with-prefix' - ), - pytest.param( - file_compound_extension_component_event('foobar.%04d.jpg [1-10]'), {}, - '.jpg', - id='file-sequence-compound-extension-component-event-valid-clique-single-extension' - ), -]) +@pytest.mark.parametrize( + "entity, context, expected", + [ + pytest.param( + file_compound_extension_component_event("mytest.foo.bar"), + {}, + ".foo.bar", + id="file-compound-extension-component-event", + ), + pytest.param( + file_compound_extension_component_event("mytest.%4d.foo.bar"), + {}, + ".foo.bar", + id="file-sequence-compound-extension-component-event", + ), + pytest.param( + file_compound_extension_component_event("mytest"), + {}, + "", + id="no-file-compound-extension-component-event", + ), + pytest.param( + file_compound_extension_no_component_event("mytest.foo.bar"), + {}, + ".bar", + id="file-compound-extension-no-component-event", + ), + pytest.param( + file_compound_extension_no_component_event("mytest.%4d.foo.bar"), + {}, + ".bar", + id="file-sequence-compound-extension-no-component-event", + ), + pytest.param( + file_compound_extension_no_component_event("mytest"), + {}, + "", + id="no-file-compound-extension-no-component-event", + ), + pytest.param( + file_compound_extension_component_event("%04d.bgeo.sc [1-10]"), + {}, + ".bgeo.sc", + id="file-sequence-compound-extension-component-event-valid-clique", + ), + pytest.param( + file_compound_extension_component_event("argh.%04d.bgeo.sc [1-10]"), + {}, + ".bgeo.sc", + 
id="file-sequence-compound-extension-component-event-valid-clique-with-prefix", + ), + pytest.param( + file_compound_extension_component_event("foobar.%04d.jpg [1-10]"), + {}, + ".jpg", + id="file-sequence-compound-extension-component-event-valid-clique-single-extension", + ), + ], +) def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' + """Get resource identifier.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): structure.get_resource_identifier(entity, context) diff --git a/test/unit/structure/test_id.py b/test/unit/structure/test_id.py index 328b1a32..e6774fb2 100644 --- a/test/unit/structure/test_id.py +++ b/test/unit/structure/test_id.py @@ -10,108 +10,119 @@ import ftrack_api.structure.id -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def structure(): - '''Return structure.''' - return ftrack_api.structure.id.IdStructure(prefix='path') + """Return structure.""" + return ftrack_api.structure.id.IdStructure(prefix="path") # Note: When it is possible to use indirect=True on just a few arguments, the # called functions here can change to standard fixtures. # https://github.com/pytest-dev/pytest/issues/579 + def file_component(container=None): - '''Return file component.''' + """Return file component.""" session = ftrack_api.Session() - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': '0001', - 'file_type': '.png', - 'container': container - }) + entity = session.create( + "FileComponent", + { + "id": "f6cd40cb-d1c0-469f-a2d5-10369be8a724", + "name": "0001", + "file_type": ".png", + "container": container, + }, + ) return entity def sequence_component(padding=0): - '''Return sequence component with *padding*.''' + """Return sequence component with *padding*.""" session = ftrack_api.Session() - entity = session.create('SequenceComponent', { - 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', - 'name': 'sequence_component', - 'file_type': '.png', - 'padding': padding - }) + entity = session.create( + "SequenceComponent", + { + "id": "ff17edad-2129-483b-8b59-d1a654c8497b", + "name": "sequence_component", + "file_type": ".png", + "padding": padding, + }, + ) return entity def container_component(): - '''Return container component.''' + """Return container component.""" session = ftrack_api.Session() - entity = session.create('ContainerComponent', { - 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', - 'name': 'container_component' - }) + entity = session.create( + "ContainerComponent", + {"id": "03ab9967-f86c-4b55-8252-cd187d0c244a", "name": "container_component"}, + ) return entity def unsupported_entity(): - '''Return an unsupported entity.''' + """Return an unsupported entity.""" session = ftrack_api.Session() - entity = session.create('User', { - 'username': 'martin' - }) + entity = session.create("User", {"username": "martin"}) return entity -@pytest.mark.parametrize('entity, context, expected', [ - pytest.param( - file_component(), {}, - 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' - , id='file-component' - ), - pytest.param( - file_component(container_component()), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' - 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png', - id='file-component-in-container' - ), - pytest.param( - file_component(sequence_component()), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png', - id='file-component-in-sequence' - ), - pytest.param( - sequence_component(padding=0), 
{}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png', - id='unpadded-sequence-component' - ), - pytest.param( - sequence_component(padding=4), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png', - id='padded-sequence-component' - ), - pytest.param( - container_component(), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a', - id='container-component' - ), - pytest.param( - unsupported_entity(), {}, - NotImplementedError, - id='unsupported-entity' - ) - -]) +@pytest.mark.parametrize( + "entity, context, expected", + [ + pytest.param( + file_component(), + {}, + "path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png", + id="file-component", + ), + pytest.param( + file_component(container_component()), + {}, + "path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/" + "f6cd40cb-d1c0-469f-a2d5-10369be8a724.png", + id="file-component-in-container", + ), + pytest.param( + file_component(sequence_component()), + {}, + "path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png", + id="file-component-in-sequence", + ), + pytest.param( + sequence_component(padding=0), + {}, + "path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png", + id="unpadded-sequence-component", + ), + pytest.param( + sequence_component(padding=4), + {}, + "path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png", + id="padded-sequence-component", + ), + pytest.param( + container_component(), + {}, + "path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a", + id="container-component", + ), + pytest.param( + unsupported_entity(), {}, NotImplementedError, id="unsupported-entity" + ), + ], +) def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' + """Get resource identifier.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): structure.get_resource_identifier(entity, context) diff --git a/test/unit/structure/test_origin.py b/test/unit/structure/test_origin.py index 2b2168e8..c07f7a02 100644 --- a/test/unit/structure/test_origin.py +++ b/test/unit/structure/test_origin.py @@ -9,19 +9,27 @@ import ftrack_api.structure.origin -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def structure(): - '''Return structure.''' + """Return structure.""" return ftrack_api.structure.origin.OriginStructure() -@pytest.mark.parametrize('entity, context, expected', [ - pytest.param(mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier', id='valid-context'), - pytest.param(mock.Mock(), {}, ValueError, id='invalid-context'), - pytest.param(mock.Mock(), None, ValueError, id='unspecified-context') -]) +@pytest.mark.parametrize( + "entity, context, expected", + [ + pytest.param( + mock.Mock(), + {"source_resource_identifier": "identifier"}, + "identifier", + id="valid-context", + ), + pytest.param(mock.Mock(), {}, ValueError, id="invalid-context"), + pytest.param(mock.Mock(), None, ValueError, id="unspecified-context"), + ], +) def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' + """Get resource identifier.""" if inspect.isclass(expected) and issubclass(expected, Exception): with pytest.raises(expected): structure.get_resource_identifier(entity, context) diff --git a/test/unit/structure/test_standard.py b/test/unit/structure/test_standard.py index 4d2ca825..13d26f32 100644 --- a/test/unit/structure/test_standard.py +++ b/test/unit/structure/test_standard.py @@ -9,23 +9,26 @@ import ftrack_api.structure.standard -@pytest.fixture(scope='session') 
+@pytest.fixture(scope="session") def new_project(request): - '''Return new empty project.''' + """Return new empty project.""" session = ftrack_api.Session() - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) + project_schema = session.query("ProjectSchema").first() + project_name = "python_api_test_{0}".format(uuid.uuid1().hex) + project = session.create( + "Project", + { + "name": project_name, + "full_name": project_name + "_full", + "project_schema": project_schema, + }, + ) session.commit() def cleanup(): - '''Remove created entity.''' + """Remove created entity.""" session.delete(project) session.commit() @@ -35,39 +38,35 @@ def cleanup(): def new_container_component(): - '''Return container component.''' + """Return container component.""" session = ftrack_api.Session() - entity = session.create('ContainerComponent', { - 'name': 'container_component' - }) + entity = session.create("ContainerComponent", {"name": "container_component"}) return entity def new_sequence_component(): - '''Return sequence component.''' + """Return sequence component.""" session = ftrack_api.Session() entity = session.create_component( - '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'} + "/tmp/foo/%04d.jpg [1-10]", location=None, data={"name": "baz"} ) return entity -def new_file_component(name='foo', container=None): - '''Return file component with *name* and *container*.''' +def new_file_component(name="foo", container=None): + """Return file component with *name* and *container*.""" if container: session = container.session else: session = ftrack_api.Session() - entity = session.create('FileComponent', { - 'name': name, - 'file_type': '.png', - 'container': container - }) + entity = session.create( + "FileComponent", {"name": name, "file_type": ".png", "container": container} + ) return entity @@ -82,189 +81,201 @@ def new_file_component(name='foo', container=None): # tests. This means that all hierarchical names must be unique, otherwise an # IntegrityError will be raised on the server. 
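The module-level factory functions used by the parametrizations in these test files (file_component, sequence_component, new_file_component and friends) exist because, as the notes above them point out, pytest historically could not apply indirect parametrization to only some arguments (pytest-dev/pytest#579). Recent pytest accepts a list of argument names for indirect, so the factories could become fixtures along these lines; a minimal sketch with hypothetical names, not part of this change:

    import pytest


    @pytest.fixture
    def sequence(request):
        # Hypothetical fixture standing in for the module-level factories.
        return {"padding": request.param}


    @pytest.mark.parametrize(
        "sequence, expected",
        [(4, "%04d"), (None, "%d")],
        indirect=["sequence"],  # Only "sequence" is routed through the fixture.
    )
    def test_padding_expression(sequence, expected):
        padding = sequence["padding"]
        assert ("%0{0}d".format(padding) if padding else "%d") == expected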
+ @pytest.mark.parametrize( - 'component, hierarchy, expected, structure, asset_name', + "component, hierarchy, expected, structure, asset_name", [ pytest.param( file_component, [], - '{project_name}/my_new_asset/v001/foo.png', + "{project_name}/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='file_component_on_project' + "my_new_asset", + id="file_component_on_project", ), pytest.param( file_component, [], - '{project_name}/foobar/my_new_asset/v001/foo.png', + "{project_name}/foobar/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure( - project_versions_prefix='foobar' + project_versions_prefix="foobar" ), - 'my_new_asset', id='file_component_on_project_with_prefix' + "my_new_asset", + id="file_component_on_project_with_prefix", ), pytest.param( file_component, - ['baz1', 'bar'], - '{project_name}/baz1/bar/my_new_asset/v001/foo.png', + ["baz1", "bar"], + "{project_name}/baz1/bar/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='file_component_with_hierarchy' + "my_new_asset", + id="file_component_with_hierarchy", ), pytest.param( sequence_component, - ['baz2', 'bar'], - '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg', + ["baz2", "bar"], + "{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='sequence_component' + "my_new_asset", + id="sequence_component", ), pytest.param( - sequence_component['members'][3], - ['baz3', 'bar'], - '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg', + sequence_component["members"][3], + ["baz3", "bar"], + "{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='sequence_component_member' + "my_new_asset", + id="sequence_component_member", ), pytest.param( container_component, - ['baz4', 'bar'], - '{project_name}/baz4/bar/my_new_asset/v001/container_component', + ["baz4", "bar"], + "{project_name}/baz4/bar/my_new_asset/v001/container_component", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='container_component' + "my_new_asset", + id="container_component", ), pytest.param( new_file_component(container=container_component), - ['baz5', 'bar'], + ["baz5", "bar"], ( - '{project_name}/baz5/bar/my_new_asset/v001/container_component/' - 'foo.png' + "{project_name}/baz5/bar/my_new_asset/v001/container_component/" + "foo.png" ), ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='container_component_member' + "my_new_asset", + id="container_component_member", ), pytest.param( file_component, - [u'björn'], - '{project_name}/bjorn/my_new_asset/v001/foo.png', + ["björn"], + "{project_name}/bjorn/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='slugify_non_ascii_hierarchy' + "my_new_asset", + id="slugify_non_ascii_hierarchy", ), pytest.param( file_component, - [u'björn!'], - '{project_name}/bjorn_/my_new_asset/v001/foo.png', + ["björn!"], + "{project_name}/bjorn_/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='slugify_illegal_hierarchy' + "my_new_asset", + id="slugify_illegal_hierarchy", ), pytest.param( - new_file_component(name=u'fää'), + new_file_component(name="fää"), [], - '{project_name}/my_new_asset/v001/faa.png', + "{project_name}/my_new_asset/v001/faa.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', 
id='slugify_non_ascii_component_name' + "my_new_asset", + id="slugify_non_ascii_component_name", ), pytest.param( - new_file_component(name=u'fo/o'), + new_file_component(name="fo/o"), [], - '{project_name}/my_new_asset/v001/fo_o.png', + "{project_name}/my_new_asset/v001/fo_o.png", ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset', id='slugify_illegal_component_name' + "my_new_asset", + id="slugify_illegal_component_name", ), pytest.param( file_component, [], - '{project_name}/aao/v001/foo.png', + "{project_name}/aao/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - u'åäö', id='slugify_non_ascii_asset_name' + "åäö", + id="slugify_non_ascii_asset_name", ), pytest.param( file_component, [], - '{project_name}/my_ne____w_asset/v001/foo.png', + "{project_name}/my_ne____w_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure(), - u'my_ne!!!!w_asset', id='slugify_illegal_asset_name' + "my_ne!!!!w_asset", + id="slugify_illegal_asset_name", ), pytest.param( file_component, - [u'björn2'], - u'{project_name}/björn2/my_new_asset/v001/foo.png', + ["björn2"], + "{project_name}/björn2/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure( illegal_character_substitute=None ), - 'my_new_asset', id='slugify_none' + "my_new_asset", + id="slugify_none", ), pytest.param( file_component, - [u'bj!rn'], - '{project_name}/bj^rn/my_new_asset/v001/foo.png', + ["bj!rn"], + "{project_name}/bj^rn/my_new_asset/v001/foo.png", ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute='^' + illegal_character_substitute="^" ), - 'my_new_asset', id='slugify_other_character' - ) - ]) + "my_new_asset", + id="slugify_other_character", + ), + ], +) def test_get_resource_identifier( component, hierarchy, expected, structure, asset_name, new_project ): - '''Get resource identifier.''' + """Get resource identifier.""" session = component.session # Create structure, asset and version. - context_id = new_project['id'] + context_id = new_project["id"] for name in hierarchy: - context_id = session.create('Folder', { - 'name': name, - 'project_id': new_project['id'], - 'parent_id': context_id - })['id'] - - asset = session.create( - 'Asset', {'name': asset_name, 'context_id': context_id} - ) - version = session.create('AssetVersion', {'asset': asset}) + context_id = session.create( + "Folder", + {"name": name, "project_id": new_project["id"], "parent_id": context_id}, + )["id"] + + asset = session.create("Asset", {"name": asset_name, "context_id": context_id}) + version = session.create("AssetVersion", {"asset": asset}) # Update component with version. 
-    if component['container']:
-        component['container']['version'] = version
+    if component["container"]:
+        component["container"]["version"] = version
     else:
-        component['version'] = version
+        component["version"] = version

     session.commit()

     assert structure.get_resource_identifier(component) == expected.format(
-        project_name=new_project['name']
+        project_name=new_project["name"]
     )


 def test_unsupported_entity(user):
-    '''Fail to get resource identifier for unsupported entity.'''
+    """Fail to get resource identifier for unsupported entity."""
     structure = ftrack_api.structure.standard.StandardStructure()
     with pytest.raises(NotImplementedError):
         structure.get_resource_identifier(user)


 def test_component_without_version_relation(new_project):
-    '''Get an identifer for component without a version relation.'''
+    """Get an identifier for component without a version relation."""
     session = new_project.session

-    asset = session.create(
-        'Asset', {'name': 'foo', 'context_id': new_project['id']}
-    )
-    version = session.create('AssetVersion', {'asset': asset})
+    asset = session.create("Asset", {"name": "foo", "context_id": new_project["id"]})
+    version = session.create("AssetVersion", {"asset": asset})

     session.commit()

     file_component = new_file_component()
-    file_component['version_id'] = version['id']
+    file_component["version_id"] = version["id"]

     structure = ftrack_api.structure.standard.StandardStructure()
     structure.get_resource_identifier(file_component)


 def test_component_without_committed_version_relation():
-    '''Fail to get an identifer for component without a committed version.'''
+    """Fail to get an identifier for component without a committed version."""
     file_component = new_file_component()
     session = file_component.session

-    version = session.create('AssetVersion', {})
+    version = session.create("AssetVersion", {})

-    file_component['version'] = version
+    file_component["version"] = version

     structure = ftrack_api.structure.standard.StandardStructure()

@@ -273,16 +284,15 @@ def test_component_without_committed_version_relation():


 @pytest.mark.xfail(
-    raises=ftrack_api.exception.ServerError,
-    reason='Due to user permission errors.'
+    raises=ftrack_api.exception.ServerError, reason="Due to user permission errors."
 )
 def test_component_without_committed_asset_relation():
-    '''Fail to get an identifer for component without a committed asset.'''
+    """Fail to get an identifier for component without a committed asset."""
     file_component = new_file_component()
     session = file_component.session

-    version = session.create('AssetVersion', {})
+    version = session.create("AssetVersion", {})

-    file_component['version'] = version
+    file_component["version"] = version

     session.commit()
diff --git a/test/unit/test_attribute.py b/test/unit/test_attribute.py
index acfce42e..481b4abb 100644
--- a/test/unit/test_attribute.py
+++ b/test/unit/test_attribute.py
@@ -7,30 +7,35 @@
 import ftrack_api.exception


-@pytest.mark.parametrize('attributes', [
-    pytest.param([], id='no initial attributes'),
-    pytest.param([ftrack_api.attribute.Attribute('test')], id='with initial attributes')
-])
+@pytest.mark.parametrize(
+    "attributes",
+    [
+        pytest.param([], id="no initial attributes"),
+        pytest.param(
+            [ftrack_api.attribute.Attribute("test")], id="with initial attributes"
+        ),
+    ],
+)
 def test_initialise_attributes_collection(attributes):
-    '''Initialise attributes collection.'''
+    """Initialise attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes(attributes)

     assert sorted(list(attribute_collection)) == sorted(attributes)


 def test_add_attribute_to_attributes_collection():
-    '''Add valid attribute to attributes collection.'''
+    """Add valid attribute to attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes()
-    attribute = ftrack_api.attribute.Attribute('test')
+    attribute = ftrack_api.attribute.Attribute("test")

     assert attribute_collection.keys() == []

     attribute_collection.add(attribute)
-    assert attribute_collection.keys() == ['test']
+    assert attribute_collection.keys() == ["test"]


 def test_add_duplicate_attribute_to_attributes_collection():
-    '''Fail to add attribute with duplicate name to attributes collection.'''
+    """Fail to add attribute with duplicate name to attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes()
-    attribute = ftrack_api.attribute.Attribute('test')
+    attribute = ftrack_api.attribute.Attribute("test")

     attribute_collection.add(attribute)

     with pytest.raises(ftrack_api.exception.NotUniqueError):
@@ -38,9 +43,9 @@
 def test_remove_attribute_from_attributes_collection():
-    '''Remove attribute from attributes collection.'''
+    """Remove attribute from attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes()
-    attribute = ftrack_api.attribute.Attribute('test')
+    attribute = ftrack_api.attribute.Attribute("test")

     attribute_collection.add(attribute)
     assert len(attribute_collection) == 1
@@ -50,77 +55,92 @@
 def test_remove_missing_attribute_from_attributes_collection():
-    '''Fail to remove attribute not present in attributes collection.'''
+    """Fail to remove attribute not present in attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes()
-    attribute = ftrack_api.attribute.Attribute('test')
+    attribute = ftrack_api.attribute.Attribute("test")

     with pytest.raises(KeyError):
         attribute_collection.remove(attribute)


 def test_get_attribute_from_attributes_collection():
-    '''Get attribute from attributes collection.'''
+    """Get attribute from attributes collection."""
     attribute_collection = ftrack_api.attribute.Attributes()
-    attribute = ftrack_api.attribute.Attribute('test')
+
attribute = ftrack_api.attribute.Attribute("test") attribute_collection.add(attribute) - retrieved_attribute = attribute_collection.get('test') + retrieved_attribute = attribute_collection.get("test") assert retrieved_attribute is attribute def test_get_missing_attribute_from_attributes_collection(): - '''Get attribute not present in attributes collection.''' + """Get attribute not present in attributes collection.""" attribute_collection = ftrack_api.attribute.Attributes() - assert attribute_collection.get('test') is None - - -@pytest.mark.parametrize('attributes, expected', [ - pytest.param([], [], id='no initial attributes'), - pytest.param([ftrack_api.attribute.Attribute('test')], ['test'], id='with initial attributes') -]) + assert attribute_collection.get("test") is None + + +@pytest.mark.parametrize( + "attributes, expected", + [ + pytest.param([], [], id="no initial attributes"), + pytest.param( + [ftrack_api.attribute.Attribute("test")], + ["test"], + id="with initial attributes", + ), + ], +) def test_attribute_collection_keys(attributes, expected): - '''Retrieve keys for attribute collection.''' + """Retrieve keys for attribute collection.""" attribute_collection = ftrack_api.attribute.Attributes(attributes) assert sorted(attribute_collection.keys()) == sorted(expected) -@pytest.mark.parametrize('attribute, expected', [ - pytest.param(None, False, id='none attribute'), - pytest.param(ftrack_api.attribute.Attribute('b'), True, id='present attribute'), - pytest.param(ftrack_api.attribute.Attribute('c'), False, id='missing attribute') -]) +@pytest.mark.parametrize( + "attribute, expected", + [ + pytest.param(None, False, id="none attribute"), + pytest.param(ftrack_api.attribute.Attribute("b"), True, id="present attribute"), + pytest.param( + ftrack_api.attribute.Attribute("c"), False, id="missing attribute" + ), + ], +) def test_attributes_collection_contains(attribute, expected): - '''Check presence in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes([ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ]) + """Check presence in attributes collection.""" + attribute_collection = ftrack_api.attribute.Attributes( + [ftrack_api.attribute.Attribute("a"), ftrack_api.attribute.Attribute("b")] + ) assert (attribute in attribute_collection) is expected -@pytest.mark.parametrize('attributes, expected', [ - pytest.param([], 0, id='no attributes'), - pytest.param([ftrack_api.attribute.Attribute('test')], 1, id='single attribute'), - pytest.param( - [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ], - 2, id='multiple attributes'), -]) +@pytest.mark.parametrize( + "attributes, expected", + [ + pytest.param([], 0, id="no attributes"), + pytest.param( + [ftrack_api.attribute.Attribute("test")], 1, id="single attribute" + ), + pytest.param( + [ftrack_api.attribute.Attribute("a"), ftrack_api.attribute.Attribute("b")], + 2, + id="multiple attributes", + ), + ], +) def test_attributes_collection_count(attributes, expected): - '''Count attributes in attributes collection.''' + """Count attributes in attributes collection.""" attribute_collection = ftrack_api.attribute.Attributes(attributes) assert len(attribute_collection) == expected def test_iterate_over_attributes_collection(): - '''Iterate over attributes collection.''' + """Iterate over attributes collection.""" attributes = [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') + ftrack_api.attribute.Attribute("a"), + 
ftrack_api.attribute.Attribute("b"), ] attribute_collection = ftrack_api.attribute.Attributes(attributes) @@ -128,4 +148,3 @@ def test_iterate_over_attributes_collection(): attributes.remove(attribute) assert len(attributes) == 0 - diff --git a/test/unit/test_cache.py b/test/unit/test_cache.py index b221d06a..4dd6fbe7 100644 --- a/test/unit/test_cache.py +++ b/test/unit/test_cache.py @@ -10,74 +10,67 @@ import ftrack_api.cache -@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 'serialised']) +@pytest.fixture(params=["proxy", "layered", "memory", "file", "serialised"]) def cache(request): - '''Return cache.''' - if request.param == 'proxy': - cache = ftrack_api.cache.ProxyCache( - ftrack_api.cache.MemoryCache() - ) + """Return cache.""" + if request.param == "proxy": + cache = ftrack_api.cache.ProxyCache(ftrack_api.cache.MemoryCache()) - elif request.param == 'layered': - cache = ftrack_api.cache.LayeredCache( - [ftrack_api.cache.MemoryCache()] - ) + elif request.param == "layered": + cache = ftrack_api.cache.LayeredCache([ftrack_api.cache.MemoryCache()]) - elif request.param == 'memory': + elif request.param == "memory": cache = ftrack_api.cache.MemoryCache() - elif request.param == 'file': + elif request.param == "file": cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + tempfile.gettempdir(), "{0}.dbm".format(uuid.uuid4().hex) ) cache = ftrack_api.cache.FileCache(cache_path) def cleanup(): - '''Cleanup.''' + """Cleanup.""" try: os.remove(cache_path) except OSError: # BSD DB (Mac OSX) implementation of the interface will append # a .db extension. - os.remove(cache_path + '.db') + os.remove(cache_path + ".db") request.addfinalizer(cleanup) - elif request.param == 'serialised': + elif request.param == "serialised": cache = ftrack_api.cache.SerialisedCache( ftrack_api.cache.MemoryCache(), encode=lambda value: value, - decode=lambda value: value + decode=lambda value: value, ) else: - raise ValueError( - 'Unrecognised cache fixture type {0!r}'.format(request.param) - ) + raise ValueError("Unrecognised cache fixture type {0!r}".format(request.param)) return cache - class Class(object): - '''Class for testing.''' + """Class for testing.""" def method(self, key): - '''Method for testing.''' + """Method for testing.""" def function(mutable, x, y=2): - '''Function for testing.''' - mutable['called'] = True - return {'result': x + y} + """Function for testing.""" + mutable["called"] = True + return {"result": x + y} def assert_memoised_call( memoiser, function, expected, args=None, kw=None, memoised=True ): - '''Assert *function* call via *memoiser* was *memoised*.''' - mapping = {'called': False} + """Assert *function* call via *memoiser* was *memoised*.""" + mapping = {"called": False} if args is not None: args = (mapping,) + args else: @@ -86,59 +79,59 @@ def assert_memoised_call( result = memoiser.call(function, args, kw) assert result == expected - assert mapping['called'] is not memoised + assert mapping["called"] is not memoised def test_get(cache): - '''Retrieve item from cache.''' - cache.set('key', 'value') - assert cache.get('key') == 'value' + """Retrieve item from cache.""" + cache.set("key", "value") + assert cache.get("key") == "value" def test_get_missing_key(cache): - '''Fail to retrieve missing item from cache.''' + """Fail to retrieve missing item from cache.""" with pytest.raises(KeyError): - cache.get('key') + cache.get("key") def test_set(cache): - '''Set item in cache.''' + """Set item in cache.""" with 
pytest.raises(KeyError): - cache.get('key') + cache.get("key") - cache.set('key', 'value') - assert cache.get('key') == 'value' + cache.set("key", "value") + assert cache.get("key") == "value" def test_remove(cache): - '''Remove item from cache.''' - cache.set('key', 'value') - cache.remove('key') + """Remove item from cache.""" + cache.set("key", "value") + cache.remove("key") with pytest.raises(KeyError): - cache.get('key') + cache.get("key") def test_remove_missing_key(cache): - '''Fail to remove missing key.''' + """Fail to remove missing key.""" with pytest.raises(KeyError): - cache.remove('key') + cache.remove("key") def test_keys(cache): - '''Retrieve keys of items in cache.''' + """Retrieve keys of items in cache.""" assert cache.keys() == [] - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - assert sorted(cache.keys()) == sorted(['a', 'b', 'c']) + cache.set("a", "a_value") + cache.set("b", "b_value") + cache.set("c", "c_value") + assert sorted(cache.keys()) == sorted(["a", "b", "c"]) def test_clear(cache): - '''Remove items from cache.''' - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') + """Remove items from cache.""" + cache.set("a", "a_value") + cache.set("b", "b_value") + cache.set("c", "c_value") assert cache.keys() cache.clear() @@ -147,244 +140,219 @@ def test_clear(cache): def test_clear_using_pattern(cache): - '''Remove items that match pattern from cache.''' - cache.set('matching_key', 'value') - cache.set('another_matching_key', 'value') - cache.set('key_not_matching', 'value') + """Remove items that match pattern from cache.""" + cache.set("matching_key", "value") + cache.set("another_matching_key", "value") + cache.set("key_not_matching", "value") assert cache.keys() - cache.clear(pattern='.*matching_key$') + cache.clear(pattern=".*matching_key$") - assert cache.keys() == ['key_not_matching'] + assert cache.keys() == ["key_not_matching"] def test_clear_encountering_missing_key(cache, mocker): - '''Clear missing key.''' + """Clear missing key.""" # Force reporting keys that are not actually valid for test purposes. - mocker.patch.object(cache, 'keys', lambda: ['missing']) - assert cache.keys() == ['missing'] + mocker.patch.object(cache, "keys", lambda: ["missing"]) + assert cache.keys() == ["missing"] # Should not error even though key not valid. cache.clear() # The key was not successfully removed so should still be present. - assert cache.keys() == ['missing'] + assert cache.keys() == ["missing"] def test_layered_cache_propagates_value_on_get(): - '''Layered cache propagates value on get.''' + """Layered cache propagates value on get.""" caches = [ ftrack_api.cache.MemoryCache(), ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() + ftrack_api.cache.MemoryCache(), ] cache = ftrack_api.cache.LayeredCache(caches) # Set item on second level cache only. - caches[1].set('key', 'value') + caches[1].set("key", "value") # Retrieving key via layered cache should propagate it automatically to # higher level caches only. 
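The propagation rule described above, where a hit at depth back-fills the levels above it and only those, reduces to a few lines with plain dicts standing in for cache levels. This is an illustrative sketch of the behaviour the following assertions pin down, not ftrack_api.cache.LayeredCache itself:

    def layered_get(layers, key):
        # *layers* is ordered from highest to deepest cache level.
        for index, layer in enumerate(layers):
            if key in layer:
                value = layer[key]
                # Back-fill only the levels above the one that matched.
                for upper in layers[:index]:
                    upper[key] = value
                return value
        raise KeyError(key)


    levels = [{}, {"key": "value"}, {}]
    assert layered_get(levels, "key") == "value"
    assert "key" in levels[0] and "key" not in levels[2]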
- assert cache.get('key') == 'value' - assert caches[0].get('key') == 'value' + assert cache.get("key") == "value" + assert caches[0].get("key") == "value" with pytest.raises(KeyError): - caches[2].get('key') + caches[2].get("key") def test_layered_cache_remove_at_depth(): - '''Remove key that only exists at depth in LayeredCache.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] + """Remove key that only exists at depth in LayeredCache.""" + caches = [ftrack_api.cache.MemoryCache(), ftrack_api.cache.MemoryCache()] cache = ftrack_api.cache.LayeredCache(caches) # Set item on second level cache only. - caches[1].set('key', 'value') + caches[1].set("key", "value") # Removing key that only exists at depth should not raise key error. - cache.remove('key') + cache.remove("key") # Ensure key was removed. assert not cache.keys() def test_expand_references(): - '''Test that references are expanded from serialized cache.''' + """Test that references are expanded from serialized cache.""" - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) + cache_path = os.path.join(tempfile.gettempdir(), "{0}.dbm".format(uuid.uuid4().hex)) def make_cache(session, cache_path): - '''Create a serialised file cache.''' + """Create a serialised file cache.""" serialized_file_cache = ftrack_api.cache.SerialisedCache( ftrack_api.cache.FileCache(cache_path), encode=session.encode, - decode=session.decode + decode=session.decode, ) return serialized_file_cache # Populate the serialized file cache. session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) + cache=lambda session, cache_path=cache_path: make_cache(session, cache_path) ) expanded_results = dict() - query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10' + query_string = ( + "select asset.parent from AssetVersion where asset is_not None limit 10" + ) for sequence in session.query(query_string): - asset = sequence.get('asset') + asset = sequence.get("asset") - expanded_results.setdefault( - asset.get('id'), asset.get('parent') - ) + expanded_results.setdefault(asset.get("id"), asset.get("parent")) # Fetch the data from cache. new_session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) + cache=lambda session, cache_path=cache_path: make_cache(session, cache_path) ) - new_session_two = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) + cache=lambda session, cache_path=cache_path: make_cache(session, cache_path) ) # Make sure references are merged. for sequence in new_session.query(query_string): - asset = sequence.get('asset') + asset = sequence.get("asset") - assert ( - asset.get('parent') == expanded_results[asset.get('id')] - ) + assert asset.get("parent") == expanded_results[asset.get("id")] # Use for fetching directly using get. 
         assert (
-            new_session_two.get(asset.entity_type, asset.get('id')).get('parent') ==
-            expanded_results[asset.get('id')]
+            new_session_two.get(asset.entity_type, asset.get("id")).get("parent")
+            == expanded_results[asset.get("id")]
         )


-@pytest.mark.parametrize('items, key', [
-    pytest.param(({},), '{}', id='single object'),
-    pytest.param(({}, {}), '{}{}', id='multiple objects')
-])
+@pytest.mark.parametrize(
+    "items, key",
+    [
+        pytest.param(({},), "{}", id="single object"),
+        pytest.param(({}, {}), "{}{}", id="multiple objects"),
+    ],
+)
 def test_string_key_maker_key(items, key):
-    '''Generate key using string key maker.'''
+    """Generate key using string key maker."""
     key_maker = ftrack_api.cache.StringKeyMaker()

     assert key_maker.key(*items) == key


 @pytest.mark.skipif(sys.version_info > (3, 0), reason="requires Python2")
-@pytest.mark.parametrize('items, key', [
-    pytest.param(
-        ({},),
-        b'\x01\x01',
-        id='single mapping'),
-    pytest.param(
-        ({'a': 'b'}, [1, 2]),
-        '\x01'
-        '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.'
-        '\x01'
-        '\x00'
-        '\x03'
-        '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.'
-        '\x03',
-        id='multiple objects'),
-    pytest.param(
-        (function,),
-        b'\x04function\x00unit.test_cache',
-        id='function'),
-    pytest.param(
-        (Class,),
-        b'\x04Class\x00unit.test_cache',
-        id='class'),
-    pytest.param(
-        (Class().method,),
-        b'\x04method\x00Class\x00unit.test_cache',
-        id='method'),
-    pytest.param(
-        (callable,),
-        b'\x04callable',
-        id='builtin')
-])
+@pytest.mark.parametrize(
+    "items, key",
+    [
+        pytest.param(({},), b"\x01\x01", id="single mapping"),
+        pytest.param(
+            ({"a": "b"}, [1, 2]),
+            "\x01"
+            "\x80\x02U\x01a."
+            "\x02"
+            "\x80\x02U\x01b."
+            "\x01"
+            "\x00"
+            "\x03"
+            "\x80\x02K\x01."
+            "\x00"
+            "\x80\x02K\x02."
+            "\x03",
+            id="multiple objects",
+        ),
+        pytest.param((function,), b"\x04function\x00unit.test_cache", id="function"),
+        pytest.param((Class,), b"\x04Class\x00unit.test_cache", id="class"),
+        pytest.param(
+            (Class().method,), b"\x04method\x00Class\x00unit.test_cache", id="method"
+        ),
+        pytest.param((callable,), b"\x04callable", id="builtin"),
+    ],
+)
 def test_object_key_maker_key_py2k(items, key):
-    '''Generate key using string key maker.'''
+    """Generate key using object key maker."""
     key_maker = ftrack_api.cache.ObjectKeyMaker()

     assert key_maker.key(*items) == key


 @pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
-@pytest.mark.parametrize('items, key', [
-    pytest.param(
-        ({},),
-        b'\x01\x01',
-        id='single mapping'),
-    pytest.param(
-        ({'a': 'b'}, [1, 2]),
-        b'\x01\x80\x02X\x01\x00\x00\x00aq\x00.\x02\x80\x02X\x01\x00\x00\x00bq\x00.'
-        b'\x01\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03',
-        id='multiple objects'),
-    pytest.param(
-        (function,),
-        b'\x04function\x00unit.test_cache',
-        id='function'),
-    pytest.param(
-        (Class,),
-        b'\x04Class\x00unit.test_cache',
-        id='class'),
-    pytest.param(
-        (Class().method,),
-        b'\x04method\x00Class\x00unit.test_cache',
-        id='method'),
-    pytest.param(
-        (callable,),
-        b'\x04callable',
-        id='builtin')
-])
+@pytest.mark.parametrize(
+    "items, key",
+    [
+        pytest.param(({},), b"\x01\x01", id="single mapping"),
+        pytest.param(
+            ({"a": "b"}, [1, 2]),
+            b"\x01\x80\x02X\x01\x00\x00\x00aq\x00.\x02\x80\x02X\x01\x00\x00\x00bq\x00."
+ b"\x01\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03", + id="multiple objects", + ), + pytest.param((function,), b"\x04function\x00unit.test_cache", id="function"), + pytest.param((Class,), b"\x04Class\x00unit.test_cache", id="class"), + pytest.param( + (Class().method,), b"\x04method\x00Class\x00unit.test_cache", id="method" + ), + pytest.param((callable,), b"\x04callable", id="builtin"), + ], +) def test_object_key_maker_key_py3k(items, key): - '''Generate key using string key maker.''' + """Generate key using string key maker.""" key_maker = ftrack_api.cache.ObjectKeyMaker() assert key_maker.key(*items) == key def test_memoised_call(): - '''Call memoised function.''' + """Call memoised function.""" memoiser = ftrack_api.cache.Memoiser() # Initial call should not be memoised so function is executed. assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=False + memoiser, function, args=(1,), expected={"result": 3}, memoised=False ) # Identical call should be memoised so function is not executed again. assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=True + memoiser, function, args=(1,), expected={"result": 3}, memoised=True ) # Differing call is not memoised so function is executed. assert_memoised_call( - memoiser, function, args=(3,), expected={'result': 5}, memoised=False + memoiser, function, args=(3,), expected={"result": 5}, memoised=False ) def test_memoised_call_variations(): - '''Call memoised function with identical arguments using variable format.''' + """Call memoised function with identical arguments using variable format.""" memoiser = ftrack_api.cache.Memoiser() - expected = {'result': 3} + expected = {"result": 3} # Call function once to ensure is memoised. assert_memoised_call( @@ -394,48 +362,49 @@ def test_memoised_call_variations(): # Each of the following calls should equate to the same key and make # use of the memoised value. for args, kw in [ - ((), {'x': 1}), - ((), {'x': 1, 'y': 2}), - ((1,), {'y': 2}), - ((1,), {}) + ((), {"x": 1}), + ((), {"x": 1, "y": 2}), + ((1,), {"y": 2}), + ((1,), {}), ]: - assert_memoised_call( - memoiser, function, args=args, kw=kw, expected=expected - ) + assert_memoised_call(memoiser, function, args=args, kw=kw, expected=expected) # The following calls should all be treated as new variations and so # not use any memoised value. 
assert_memoised_call( - memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False + memoiser, function, kw={"x": 2}, expected={"result": 4}, memoised=False ) assert_memoised_call( - memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5}, - memoised=False + memoiser, function, kw={"x": 3, "y": 2}, expected={"result": 5}, memoised=False ) assert_memoised_call( - memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6}, - memoised=False + memoiser, + function, + args=(4,), + kw={"y": 2}, + expected={"result": 6}, + memoised=False, ) assert_memoised_call( - memoiser, function, args=(5, ), expected={'result': 7}, memoised=False + memoiser, function, args=(5,), expected={"result": 7}, memoised=False ) def test_memoised_mutable_return_value(): - '''Avoid side effects for returned mutable arguments when memoising.''' + """Avoid side effects for returned mutable arguments when memoising.""" memoiser = ftrack_api.cache.Memoiser() - arguments = ({'called': False}, 1) + arguments = ({"called": False}, 1) result_a = memoiser.call(function, arguments) - assert result_a == {'result': 3} - assert arguments[0]['called'] + assert result_a == {"result": 3} + assert arguments[0]["called"] # Modify mutable externally and check that stored memoised value is # unchanged. - del result_a['result'] + del result_a["result"] - arguments[0]['called'] = False + arguments[0]["called"] = False result_b = memoiser.call(function, arguments) - assert result_b == {'result': 3} - assert not arguments[0]['called'] + assert result_b == {"result": 3} + assert not arguments[0]["called"] diff --git a/test/unit/test_collection.py b/test/unit/test_collection.py index 54ef0a55..f45255a8 100644 --- a/test/unit/test_collection.py +++ b/test/unit/test_collection.py @@ -15,44 +15,39 @@ def create_mock_entity(session): - '''Return new mock entity for *session*.''' + """Return new mock entity for *session*.""" entity = mock.MagicMock() entity.session = session - entity.primary_key_attributes = ['id'] - entity['id'] = str(uuid.uuid4()) + entity.primary_key_attributes = ["id"] + entity["id"] = str(uuid.uuid4()) return entity @pytest.fixture def mock_entity(session): - '''Return mock entity.''' + """Return mock entity.""" return create_mock_entity(session) @pytest.fixture def mock_entities(session): - '''Return list of two mock entities.''' - return [ - create_mock_entity(session), - create_mock_entity(session) - ] + """Return list of two mock entities.""" + return [create_mock_entity(session), create_mock_entity(session)] @pytest.fixture def mock_attribute(): - '''Return mock attribute.''' + """Return mock attribute.""" attribute = mock.MagicMock() - attribute.name = 'test' + attribute.name = "test" return attribute def test_collection_initialisation_does_not_modify_entity_state( mock_entity, mock_attribute, mock_entities ): - '''Initialising collection does not modify entity state.''' - ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) + """Initialising collection does not modify entity state.""" + ftrack_api.collection.Collection(mock_entity, mock_attribute, data=mock_entities) assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET @@ -60,7 +55,7 @@ def test_collection_initialisation_does_not_modify_entity_state( def test_immutable_collection_initialisation( mock_entity, mock_attribute, mock_entities ): - '''Initialise immutable collection.''' + """Initialise immutable collection.""" collection = ftrack_api.collection.Collection( mock_entity, 
mock_attribute, data=mock_entities, mutable=False ) @@ -69,10 +64,8 @@ def test_immutable_collection_initialisation( assert collection.mutable is False -def test_collection_shallow_copy( - mock_entity, mock_attribute, mock_entities, session -): - '''Shallow copying collection should avoid indirect mutation.''' +def test_collection_shallow_copy(mock_entity, mock_attribute, mock_entities, session): + """Shallow copying collection should avoid indirect mutation.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -86,10 +79,8 @@ def test_collection_shallow_copy( assert list(collection_copy) == mock_entities + [new_entity] -def test_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Insert a value into collection.''' +def test_collection_insert(mock_entity, mock_attribute, mock_entities, session): + """Insert a value into collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -99,10 +90,8 @@ def test_collection_insert( assert list(collection) == [new_entity] + mock_entities -def test_collection_insert_duplicate( - mock_entity, mock_attribute, mock_entities -): - '''Fail to insert a duplicate value into collection.''' +def test_collection_insert_duplicate(mock_entity, mock_attribute, mock_entities): + """Fail to insert a duplicate value into collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -114,7 +103,7 @@ def test_collection_insert_duplicate( def test_immutable_collection_insert( mock_entity, mock_attribute, mock_entities, session ): - '''Fail to insert a value into immutable collection.''' + """Fail to insert a value into immutable collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities, mutable=False ) @@ -123,10 +112,8 @@ def test_immutable_collection_insert( collection.insert(0, create_mock_entity(session)) -def test_collection_set_item( - mock_entity, mock_attribute, mock_entities, session -): - '''Set item at index in collection.''' +def test_collection_set_item(mock_entity, mock_attribute, mock_entities, session): + """Set item at index in collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -136,10 +123,8 @@ def test_collection_set_item( assert list(collection) == [new_entity, mock_entities[1]] -def test_collection_re_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Re-set value at exact same index in collection.''' +def test_collection_re_set_item(mock_entity, mock_attribute, mock_entities): + """Re-set value at exact same index in collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -148,10 +133,8 @@ def test_collection_re_set_item( assert list(collection) == mock_entities -def test_collection_set_duplicate_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set a duplicate value into collection at different index.''' +def test_collection_set_duplicate_item(mock_entity, mock_attribute, mock_entities): + """Fail to set a duplicate value into collection at different index.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -160,10 +143,8 @@ def test_collection_set_duplicate_item( collection[0] = mock_entities[1] -def test_immutable_collection_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set item at 
index in immutable collection.''' +def test_immutable_collection_set_item(mock_entity, mock_attribute, mock_entities): + """Fail to set item at index in immutable collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities, mutable=False ) @@ -172,10 +153,8 @@ def test_immutable_collection_set_item( collection[0] = mock_entities[0] -def test_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Remove item at index from collection.''' +def test_collection_delete_item(mock_entity, mock_attribute, mock_entities): + """Remove item at index from collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -186,7 +165,7 @@ def test_collection_delete_item( def test_collection_delete_item_at_invalid_index( mock_entity, mock_attribute, mock_entities ): - '''Fail to remove item at missing index from immutable collection.''' + """Fail to remove item at missing index from immutable collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -195,10 +174,8 @@ def test_collection_delete_item_at_invalid_index( del collection[4] -def test_immutable_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at index from immutable collection.''' +def test_immutable_collection_delete_item(mock_entity, mock_attribute, mock_entities): + """Fail to remove item at index from immutable collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities, mutable=False ) @@ -207,10 +184,8 @@ def test_immutable_collection_delete_item( del collection[0] -def test_collection_count( - mock_entity, mock_attribute, mock_entities, session -): - '''Count items in collection.''' +def test_collection_count(mock_entity, mock_attribute, mock_entities, session): + """Count items in collection.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -223,19 +198,20 @@ def test_collection_count( assert len(collection) == 2 -@pytest.mark.parametrize('other, expected', [ - pytest.param([], False, id='empty'), - pytest.param([1, 2], True, id='same'), - pytest.param([1, 2, 3], False, id='additional'), - pytest.param([1], False, id='missing') -]) +@pytest.mark.parametrize( + "other, expected", + [ + pytest.param([], False, id="empty"), + pytest.param([1, 2], True, id="same"), + pytest.param([1, 2, 3], False, id="additional"), + pytest.param([1], False, id="missing"), + ], +) def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): - '''Determine collection equality against another collection.''' + """Determine collection equality against another collection.""" # Temporarily override determination of entity identity so that it works # against simple scalar values for purpose of test. 
- mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) + mocker.patch.object(ftrack_api.inspection, "identity", lambda entity: str(entity)) collection_a = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=[1, 2] @@ -247,15 +223,11 @@ def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): assert (collection_a == collection_b) is expected -def test_collection_not_equal_to_non_collection( - mocker, mock_entity, mock_attribute -): - '''Collection not equal to a non-collection.''' +def test_collection_not_equal_to_non_collection(mocker, mock_entity, mock_attribute): + """Collection not equal to a non-collection.""" # Temporarily override determination of entity identity so that it works # against simple scalar values for purpose of test. - mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) + mocker.patch.object(ftrack_api.inspection, "identity", lambda entity: str(entity)) collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=[1, 2] @@ -267,7 +239,7 @@ def test_collection_not_equal_to_non_collection( def test_collection_notify_on_modification( mock_entity, mock_attribute, mock_entities, session ): - '''Record UpdateEntityOperation on collection modification.''' + """Record UpdateEntityOperation on collection modification.""" collection = ftrack_api.collection.Collection( mock_entity, mock_attribute, data=mock_entities ) @@ -281,8 +253,8 @@ def test_collection_notify_on_modification( def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): - '''Shallow copying mapped collection proxy avoids indirect mutation.''' - metadata = new_project['metadata'] + """Shallow copying mapped collection proxy avoids indirect mutation.""" + metadata = new_project["metadata"] with new_project.session.operation_recording(False): metadata_copy = copy.copy(metadata) @@ -293,8 +265,8 @@ def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): def test_mapped_collection_proxy_mutable_property(new_project): - '''Mapped collection mutable property maps to underlying collection.''' - metadata = new_project['metadata'] + """Mapped collection mutable property maps to underlying collection.""" + metadata = new_project["metadata"] assert metadata.mutable is True assert metadata.collection.mutable is True @@ -303,11 +275,9 @@ def test_mapped_collection_proxy_mutable_property(new_project): assert metadata.collection.mutable is False -def test_mapped_collection_proxy_attribute_property( - new_project, mock_attribute -): - '''Mapped collection attribute property maps to underlying collection.''' - metadata = new_project['metadata'] +def test_mapped_collection_proxy_attribute_property(new_project, mock_attribute): + """Mapped collection attribute property maps to underlying collection.""" + metadata = new_project["metadata"] assert metadata.attribute is metadata.collection.attribute @@ -316,12 +286,12 @@ def test_mapped_collection_proxy_attribute_property( def test_mapped_collection_proxy_get_item(new_project, unique_name): - '''Retrieve item in mapped collection proxy.''' + """Retrieve item in mapped collection proxy.""" session = new_project.session # Prepare data. 
- metadata = new_project['metadata'] - value = 'value' + metadata = new_project["metadata"] + value = "value" metadata[unique_name] = value session.commit() @@ -330,18 +300,18 @@ def test_mapped_collection_proxy_get_item(new_project, unique_name): retrieved = session.get(*ftrack_api.inspection.identity(new_project)) assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value + assert retrieved["metadata"].keys() == [unique_name] + assert retrieved["metadata"][unique_name] == value def test_mapped_collection_proxy_set_item(new_project, unique_name): - '''Set new item in mapped collection proxy.''' + """Set new item in mapped collection proxy.""" session = new_project.session - metadata = new_project['metadata'] + metadata = new_project["metadata"] assert unique_name not in metadata - value = 'value' + value = "value" metadata[unique_name] = value assert metadata[unique_name] == value @@ -351,22 +321,22 @@ def test_mapped_collection_proxy_set_item(new_project, unique_name): retrieved = session.get(*ftrack_api.inspection.identity(new_project)) assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value + assert retrieved["metadata"].keys() == [unique_name] + assert retrieved["metadata"][unique_name] == value def test_mapped_collection_proxy_update_item(new_project, unique_name): - '''Update existing item in mapped collection proxy.''' + """Update existing item in mapped collection proxy.""" session = new_project.session # Prepare a pre-existing value. - metadata = new_project['metadata'] - value = 'value' + metadata = new_project["metadata"] + value = "value" metadata[unique_name] = value session.commit() # Set new value. - new_value = 'new_value' + new_value = "new_value" metadata[unique_name] = new_value # Confirm change persisted correctly. @@ -375,23 +345,23 @@ def test_mapped_collection_proxy_update_item(new_project, unique_name): retrieved = session.get(*ftrack_api.inspection.identity(new_project)) assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == new_value + assert retrieved["metadata"].keys() == [unique_name] + assert retrieved["metadata"][unique_name] == new_value def test_mapped_collection_proxy_delete_item(new_project, unique_name): - '''Remove existing item from mapped collection proxy.''' + """Remove existing item from mapped collection proxy.""" session = new_project.session # Prepare a pre-existing value to remove. - metadata = new_project['metadata'] - value = 'value' + metadata = new_project["metadata"] + value = "value" metadata[unique_name] = value session.commit() # Now remove value. - del new_project['metadata'][unique_name] - assert unique_name not in new_project['metadata'] + del new_project["metadata"][unique_name] + assert unique_name not in new_project["metadata"] # Confirm change persisted correctly. 
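Each of these proxy tests confirms persistence the same way: commit, then refetch the entity by primary key identity and compare. A hypothetical helper, named refetch purely for illustration, would capture the pattern:

    import ftrack_api.inspection


    def refetch(entity):
        # Commit pending changes, then pull a fresh copy of *entity* back
        # through its session by primary key identity.
        entity.session.commit()
        return entity.session.get(*ftrack_api.inspection.identity(entity))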
session.commit() @@ -399,26 +369,22 @@ def test_mapped_collection_proxy_delete_item(new_project, unique_name): retrieved = session.get(*ftrack_api.inspection.identity(new_project)) assert retrieved is not new_project - assert retrieved['metadata'].keys() == [] - assert unique_name not in retrieved['metadata'] + assert retrieved["metadata"].keys() == [] + assert unique_name not in retrieved["metadata"] def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name): - '''Fail to remove item for missing key from mapped collection proxy.''' - metadata = new_project['metadata'] + """Fail to remove item for missing key from mapped collection proxy.""" + metadata = new_project["metadata"] assert unique_name not in metadata with pytest.raises(KeyError): del metadata[unique_name] def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): - '''Iterate over keys in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) + """Iterate over keys in mapped collection proxy.""" + metadata = new_project["metadata"] + metadata.update({"a": "value-a", "b": "value-b", "c": "value-c"}) # Commit here as otherwise cleanup operation will fail because transaction # will include updating metadata to refer to a deleted entity. @@ -428,17 +394,13 @@ def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): for key in metadata: iterated.add(key) - assert iterated == set(['a', 'b', 'c']) + assert iterated == set(["a", "b", "c"]) def test_mapped_collection_proxy_count(new_project, unique_name): - '''Count items in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) + """Count items in mapped collection proxy.""" + metadata = new_project["metadata"] + metadata.update({"a": "value-a", "b": "value-b", "c": "value-c"}) # Commit here as otherwise cleanup operation will fail because transaction # will include updating metadata to refer to a deleted entity. @@ -448,21 +410,17 @@ def test_mapped_collection_proxy_count(new_project, unique_name): def test_mapped_collection_on_create(session, unique_name, project): - '''Test that it is possible to set relational attributes on create''' - metadata = { - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - } + """Test that it is possible to set relational attributes on create""" + metadata = {"a": "value-a", "b": "value-b", "c": "value-c"} task_id = session.create( - 'Task', { - 'name': unique_name, - 'parent': project, - 'metadata': metadata, - - } - ).get('id') + "Task", + { + "name": unique_name, + "parent": project, + "metadata": metadata, + }, + ).get("id") session.commit() @@ -470,100 +428,83 @@ def test_mapped_collection_on_create(session, unique_name, project): # values. session.reset() - task = session.get( - 'Task', task_id - ) + task = session.get("Task", task_id) for key, value in metadata.items(): - assert value == task['metadata'][key] + assert value == task["metadata"][key] def test_collection_refresh(new_asset_version, new_component): - '''Test collection reload.''' + """Test collection reload.""" session_two = ftrack_api.Session(auto_connect_event_hub=False) query_string = 'select components from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') + new_asset_version.get("id") ) # Fetch the new asset version in a new session. 
- new_asset_version_two = session_two.query( - query_string - ).one() + new_asset_version_two = session_two.query(query_string).one() # Modify our asset version - new_asset_version.get('components').append( - new_component - ) + new_asset_version.get("components").append(new_component) new_asset_version.session.commit() # Query the same asset version again and make sure we get the newly # populated data. - session_two.query( - query_string - ).all() + session_two.query(query_string).all() - assert ( - new_asset_version.get('components') == new_asset_version_two.get('components') + assert new_asset_version.get("components") == new_asset_version_two.get( + "components" ) # Make a local change to our asset version - new_asset_version_two.get('components').pop() + new_asset_version_two.get("components").pop() # Query the same asset version again and make sure our local changes # are not overwritten. - session_two.query( - query_string - ).all() + session_two.query(query_string).all() - assert len(new_asset_version_two.get('components')) == 0 + assert len(new_asset_version_two.get("components")) == 0 def test_mapped_collection_reload(new_asset_version): - '''Test mapped collection reload.''' + """Test mapped collection reload.""" session_two = ftrack_api.Session(auto_connect_event_hub=False) query_string = 'select metadata from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') + new_asset_version.get("id") ) # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() + new_asset_version_two = session_two.query(query_string).one() # Modify our asset version - new_asset_version['metadata']['test'] = str(uuid.uuid4()) + new_asset_version["metadata"]["test"] = str(uuid.uuid4()) new_asset_version.session.commit() # Query the same asset version again and make sure we get the newly # populated data. - session_two.query( - query_string - ).all() + session_two.query(query_string).all() assert ( - new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] + new_asset_version["metadata"]["test"] + == new_asset_version_two["metadata"]["test"] ) local_data = str(uuid.uuid4()) - new_asset_version_two['metadata']['test'] = local_data + new_asset_version_two["metadata"]["test"] = local_data # Modify our asset version again - new_asset_version['metadata']['test'] = str(uuid.uuid4()) + new_asset_version["metadata"]["test"] = str(uuid.uuid4()) new_asset_version.session.commit() # Query the same asset version again and make sure our local changes # are not overwritten. 
- session_two.query( - query_string - ).all() + session_two.query(query_string).all() - assert ( - new_asset_version_two['metadata']['test'] == local_data - ) + assert new_asset_version_two["metadata"]["test"] == local_data diff --git a/test/unit/test_custom_attribute.py b/test/unit/test_custom_attribute.py index 4b2e8a71..50909838 100644 --- a/test/unit/test_custom_attribute.py +++ b/test/unit/test_custom_attribute.py @@ -7,220 +7,205 @@ import ftrack_api + @pytest.fixture( params=[ - 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', - 'Asset' + "AssetVersion", + "Shot", + "AssetVersionList", + "TypedContextList", + "User", + "Asset", ] ) def new_entity_and_custom_attribute(request, session): - '''Return tuple with new entity, custom attribute name and value.''' - if request.param == 'AssetVersion': + """Return tuple with new entity, custom attribute name and value.""" + if request.param == "AssetVersion": entity = session.create( - request.param, { - 'asset': session.query('Asset').first() - } + request.param, {"asset": session.query("Asset").first()} ) - return (entity, 'versiontest', 123) + return (entity, "versiontest", 123) - elif request.param == 'Shot': - sequence = session.query('Sequence').first() + elif request.param == "Shot": + sequence = session.query("Sequence").first() entity = session.create( - request.param, { - 'parent_id': sequence['id'], - 'project_id': sequence['project_id'], - 'name': str(uuid.uuid1()) - } + request.param, + { + "parent_id": sequence["id"], + "project_id": sequence["project_id"], + "name": str(uuid.uuid1()), + }, ) - return (entity, 'fstart', 1005) + return (entity, "fstart", 1005) - elif request.param == 'Asset': - shot = session.query('Shot').first() + elif request.param == "Asset": + shot = session.query("Shot").first() entity = session.create( - request.param, { - 'context_id': shot['project_id'], - 'name': str(uuid.uuid1()) - } + request.param, {"context_id": shot["project_id"], "name": str(uuid.uuid1())} ) - return (entity, 'htest', 1005) + return (entity, "htest", 1005) - elif request.param in ('AssetVersionList', 'TypedContextList'): + elif request.param in ("AssetVersionList", "TypedContextList"): entity = session.create( - request.param, { - 'project_id': session.query('Project').first()['id'], - 'category_id': session.query('ListCategory').first()['id'], - 'name': str(uuid.uuid1()) - } + request.param, + { + "project_id": session.query("Project").first()["id"], + "category_id": session.query("ListCategory").first()["id"], + "name": str(uuid.uuid1()), + }, ) - return (entity, 'listbool', True) + return (entity, "listbool", True) - elif request.param == 'User': + elif request.param == "User": entity = session.create( - request.param, { - 'first_name': 'Custom attribute test', - 'last_name': 'Custom attribute test', - 'username': str(uuid.uuid1()) - } + request.param, + { + "first_name": "Custom attribute test", + "last_name": "Custom attribute test", + "username": str(uuid.uuid1()), + }, ) - return (entity, 'teststring', 'foo') + return (entity, "teststring", "foo") @pytest.mark.parametrize( - 'entity_type, entity_model_name, custom_attribute_name', + "entity_type, entity_model_name, custom_attribute_name", [ - pytest.param('Task', 'task', 'customNumber', id='task'), - pytest.param('AssetVersion', 'assetversion', 'NumberField', id='asset_version') - ] + pytest.param("Task", "task", "customNumber", id="task"), + pytest.param("AssetVersion", "assetversion", "NumberField", id="asset_version"), + ], ) def 
test_read_set_custom_attribute( session, entity_type, entity_model_name, custom_attribute_name ): - '''Retrieve custom attribute value set on instance.''' + """Retrieve custom attribute value set on instance.""" custom_attribute_value = session.query( - 'CustomAttributeValue where configuration.key is ' - '{custom_attribute_name}' - .format( - custom_attribute_name=custom_attribute_name - ) + "CustomAttributeValue where configuration.key is " + "{custom_attribute_name}".format(custom_attribute_name=custom_attribute_name) ).first() entity = session.query( - 'select custom_attributes from {entity_type} where id is ' - '{entity_id}'.format( + "select custom_attributes from {entity_type} where id is " + "{entity_id}".format( entity_type=entity_type, - entity_id=custom_attribute_value['entity_id'], + entity_id=custom_attribute_value["entity_id"], ) ).first() assert custom_attribute_value - assert entity['id'] == entity['custom_attributes'].collection.entity['id'] - assert entity is entity['custom_attributes'].collection.entity + assert entity["id"] == entity["custom_attributes"].collection.entity["id"] + assert entity is entity["custom_attributes"].collection.entity assert ( - entity['custom_attributes'][custom_attribute_name] == - custom_attribute_value['value'] + entity["custom_attributes"][custom_attribute_name] + == custom_attribute_value["value"] ) - assert custom_attribute_name in entity['custom_attributes'].keys() + assert custom_attribute_name in entity["custom_attributes"].keys() @pytest.mark.parametrize( - 'entity_type, custom_attribute_name', + "entity_type, custom_attribute_name", [ - pytest.param('Task', 'customNumber', id='task'), - pytest.param('Shot', 'fstart', id='shot'), - pytest.param('AssetVersion', 'NumberField', id='asset_version') - ] + pytest.param("Task", "customNumber", id="task"), + pytest.param("Shot", "fstart", id="shot"), + pytest.param("AssetVersion", "NumberField", id="asset_version"), + ], ) -def test_write_set_custom_attribute_value( - session, entity_type, custom_attribute_name -): - '''Overwrite existing instance level custom attribute value.''' +def test_write_set_custom_attribute_value(session, entity_type, custom_attribute_name): + """Overwrite existing instance level custom attribute value.""" entity = session.query( - 'select custom_attributes from {entity_type} where ' - 'custom_attributes.configuration.key is {custom_attribute_name}'.format( - entity_type=entity_type, - custom_attribute_name=custom_attribute_name + "select custom_attributes from {entity_type} where " + "custom_attributes.configuration.key is {custom_attribute_name}".format( + entity_type=entity_type, custom_attribute_name=custom_attribute_name ) ).first() - entity['custom_attributes'][custom_attribute_name] = 42 + entity["custom_attributes"][custom_attribute_name] = 42 - assert entity['custom_attributes'][custom_attribute_name] == 42 + assert entity["custom_attributes"][custom_attribute_name] == 42 session.commit() @pytest.mark.parametrize( - 'entity_type, custom_attribute_name', + "entity_type, custom_attribute_name", [ - pytest.param('Task', 'fstart', id='task'), - pytest.param('Shot', 'Not existing', id='shot'), - pytest.param('AssetVersion', 'fstart', id='asset_version') - ] + pytest.param("Task", "fstart", id="task"), + pytest.param("Shot", "Not existing", id="shot"), + pytest.param("AssetVersion", "fstart", id="asset_version"), + ], ) def test_read_custom_attribute_that_does_not_exist( session, entity_type, custom_attribute_name ): - '''Fail to read value from a custom attribute 
that does not exist.''' + """Fail to read value from a custom attribute that does not exist.""" entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) + "select custom_attributes from {entity_type}".format(entity_type=entity_type) ).first() with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] + entity["custom_attributes"][custom_attribute_name] @pytest.mark.parametrize( - 'entity_type, custom_attribute_name', + "entity_type, custom_attribute_name", [ - pytest.param('Task', 'fstart', id='task'), - pytest.param('Shot', 'Not existing', id='shot'), - pytest.param('AssetVersion', 'fstart', id='asset_version') - ] + pytest.param("Task", "fstart", id="task"), + pytest.param("Shot", "Not existing", id="shot"), + pytest.param("AssetVersion", "fstart", id="asset_version"), + ], ) def test_write_custom_attribute_that_does_not_exist( session, entity_type, custom_attribute_name ): - '''Fail to write a value to a custom attribute that does not exist.''' + """Fail to write a value to a custom attribute that does not exist.""" entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) + "select custom_attributes from {entity_type}".format(entity_type=entity_type) ).first() with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] = 42 + entity["custom_attributes"][custom_attribute_name] = 42 -def test_set_custom_attribute_on_new_but_persisted_version( - session, new_asset_version -): - '''Set custom attribute on new persisted version.''' - new_asset_version['custom_attributes']['versiontest'] = 5 +def test_set_custom_attribute_on_new_but_persisted_version(session, new_asset_version): + """Set custom attribute on new persisted version.""" + new_asset_version["custom_attributes"]["versiontest"] = 5 session.commit() @pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' + raises=ftrack_api.exception.ServerError, reason="Due to user permission errors." 
) -def test_batch_create_entity_and_custom_attributes( - new_entity_and_custom_attribute -): - '''Write custom attribute value and entity in the same batch.''' +def test_batch_create_entity_and_custom_attributes(new_entity_and_custom_attribute): + """Write custom attribute value and entity in the same batch.""" entity, name, value = new_entity_and_custom_attribute session = entity.session - entity['custom_attributes'][name] = value + entity["custom_attributes"][name] = value - assert entity['custom_attributes'][name] == value + assert entity["custom_attributes"][name] == value session.commit() - assert entity['custom_attributes'][name] == value + assert entity["custom_attributes"][name] == value def test_refresh_custom_attribute(new_asset_version): - '''Test custom attribute refresh.''' + """Test custom attribute refresh.""" session_two = ftrack_api.Session() - query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') + query_string = ( + 'select custom_attributes from AssetVersion where id is "{0}"'.format( + new_asset_version.get("id") + ) ) - new_asset_version['custom_attributes']['versiontest'] = 42 + new_asset_version["custom_attributes"]["versiontest"] = 42 new_asset_version.session.commit() - asset_version_two = session_two.query( - query_string - ).first() + asset_version_two = session_two.query(query_string).first() assert ( - new_asset_version['custom_attributes']['versiontest'] == - asset_version_two['custom_attributes']['versiontest'] + new_asset_version["custom_attributes"]["versiontest"] + == asset_version_two["custom_attributes"]["versiontest"] ) - - - diff --git a/test/unit/test_data.py b/test/unit/test_data.py index 3e2e67dc..510ad042 100644 --- a/test/unit/test_data.py +++ b/test/unit/test_data.py @@ -11,120 +11,120 @@ @pytest.fixture() def content(): - '''Return initial content.''' - return 'test data' + """Return initial content.""" + return "test data" -@pytest.fixture(params=['file', 'file_wrapper', 'string']) +@pytest.fixture(params=["file", "file_wrapper", "string"]) def data(request, content): - '''Return cache.''' + """Return cache.""" - if request.param == 'string': + if request.param == "string": data_object = ftrack_api.data.String(content) - elif request.param == 'file': + elif request.param == "file": file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') + file_object = os.fdopen(file_handle, "r+") file_object.write(content) file_object.flush() file_object.close() - data_object = ftrack_api.data.File(path, 'r+') + data_object = ftrack_api.data.File(path, "r+") def cleanup(): - '''Cleanup.''' + """Cleanup.""" data_object.close() os.remove(path) request.addfinalizer(cleanup) - elif request.param == 'file_wrapper': + elif request.param == "file_wrapper": file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') + file_object = os.fdopen(file_handle, "r+") file_object.write(content) file_object.seek(0) data_object = ftrack_api.data.FileWrapper(file_object) def cleanup(): - '''Cleanup.''' + """Cleanup.""" data_object.close() os.remove(path) request.addfinalizer(cleanup) else: - raise ValueError('Unrecognised parameter: {0}'.format(request.param)) + raise ValueError("Unrecognised parameter: {0}".format(request.param)) return data_object def test_read(data, content): - '''Return content from current position up to *limit*.''' + """Return content from current position up to *limit*.""" assert data.read(5) == content[:5] assert data.read() == content[5:] 
def test_write(data, content): - '''Write content at current position.''' + """Write content at current position.""" assert data.read() == content - data.write('more test data') + data.write("more test data") data.seek(0) - assert data.read() == content + 'more test data' + assert data.read() == content + "more test data" def test_flush(data): - '''Flush buffers ensuring data written.''' + """Flush buffers ensuring data written.""" # TODO: Implement better test than just calling function. data.flush() def test_seek(data, content): - '''Move internal pointer to *position*.''' + """Move internal pointer to *position*.""" data.seek(5) assert data.read() == content[5:] def test_tell(data): - '''Return current position of internal pointer.''' + """Return current position of internal pointer.""" assert data.tell() == 0 data.seek(5) assert data.tell() == 5 def test_close(data): - '''Flush buffers and prevent further access.''' + """Flush buffers and prevent further access.""" data.close() with pytest.raises(ValueError) as error: data.read() # Changed for python 3 compat - assert 'closed file' in str(error.value) + assert "closed file" in str(error.value) class Dummy(ftrack_api.data.Data): - '''Dummy string.''' + """Dummy string.""" def read(self, limit=None): - '''Return content from current position up to *limit*.''' + """Return content from current position up to *limit*.""" def write(self, content): - '''Write content at current position.''' + """Write content at current position.""" def test_unsupported_tell(): - '''Fail when tell unsupported.''' + """Fail when tell unsupported.""" data = Dummy() with pytest.raises(NotImplementedError) as error: data.tell() - assert 'Tell not supported' in str(error.value) + assert "Tell not supported" in str(error.value) def test_unsupported_seek(): - '''Fail when seek unsupported.''' + """Fail when seek unsupported.""" data = Dummy() with pytest.raises(NotImplementedError) as error: data.seek(5) - assert 'Seek not supported' in str(error.value) + assert "Seek not supported" in str(error.value) diff --git a/test/unit/test_formatter.py b/test/unit/test_formatter.py index ae565cb3..47d4dccb 100644 --- a/test/unit/test_formatter.py +++ b/test/unit/test_formatter.py @@ -7,64 +7,64 @@ def colored(text, *args, **kwargs): - '''Pass through so there are no escape sequences in output.''' + """Pass through so there are no escape sequences in output.""" return text def test_format(user, mocker): - '''Return formatted representation of entity.''' - mocker.patch.object(termcolor, 'colored', colored) + """Return formatted representation of entity.""" + mocker.patch.object(termcolor, "colored", colored) result = ftrack_api.formatter.format(user) # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: jenkins' in result - assert ' email: ' in result + assert result.startswith("User\n") + assert " username: jenkins" in result + assert " email: " in result def test_format_using_custom_formatters(user): - '''Return formatted representation of entity using custom formatters.''' + """Return formatted representation of entity using custom formatters.""" result = ftrack_api.formatter.format( - user, formatters={ - 'header': lambda text: '*{0}*'.format(text), - 'label': lambda text: '-{0}'.format(text) - } + user, + formatters={ + "header": lambda text: "*{0}*".format(text), + "label": lambda text: "-{0}".format(text), + }, ) # Cannot test entire string as too variable so check for key text. 
- assert result.startswith('*User*\n') - assert ' -username: jenkins' in result - assert ' -email: ' in result + assert result.startswith("*User*\n") + assert " -username: jenkins" in result + assert " -email: " in result def test_format_filtering(new_user, mocker): - '''Return formatted representation using custom filter.''' - mocker.patch.object(termcolor, 'colored', colored) + """Return formatted representation using custom filter.""" + mocker.patch.object(termcolor, "colored", colored) with new_user.session.auto_populating(False): result = ftrack_api.formatter.format( - new_user, - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] + new_user, attribute_filter=ftrack_api.formatter.FILTER["ignore_unset"] ) # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: {0}'.format(new_user['username']) in result - assert ' email: ' not in result + assert result.startswith("User\n") + assert " username: {0}".format(new_user["username"]) in result + assert " email: " not in result def test_format_recursive(user, mocker): - '''Return formatted recursive representation.''' - mocker.patch.object(termcolor, 'colored', colored) + """Return formatted recursive representation.""" + mocker.patch.object(termcolor, "colored", colored) - user.session.populate(user, 'timelogs.user') + user.session.populate(user, "timelogs.user") with user.session.auto_populating(False): result = ftrack_api.formatter.format(user, recursive=True) # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: jenkins' - assert ' timelogs: Timelog' in result - assert ' user: User{...}' in result + assert result.startswith("User\n") + assert " username: jenkins" in result + assert " timelogs: Timelog" in result + assert " user: User{...}" in result diff --git a/test/unit/test_inspection.py b/test/unit/test_inspection.py index 57b44613..646dbbde 100644 --- a/test/unit/test_inspection.py +++ b/test/unit/test_inspection.py @@ -6,50 +6,48 @@ def test_identity(user): - '''Retrieve identity of *user*.''' + """Retrieve identity of *user*.""" identity = ftrack_api.inspection.identity(user) - assert identity[0] == 'User' - assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] + assert identity[0] == "User" + assert identity[1] == ["d07ae5d0-66e1-11e1-b5e9-f23c91df25eb"] def test_primary_key(user): - '''Retrieve primary key of *user*.''' + """Retrieve primary key of *user*.""" primary_key = ftrack_api.inspection.primary_key(user) - assert primary_key == { - 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' - } + assert primary_key == {"id": "d07ae5d0-66e1-11e1-b5e9-f23c91df25eb"} def test_created_entity_state(session, unique_name): - '''Created entity has CREATED state.''' - new_user = session.create('User', {'username': unique_name}) + """Created entity has CREATED state.""" + new_user = session.create("User", {"username": unique_name}) assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED # Even after a modification the state should remain as CREATED. 
- new_user['username'] = 'changed' + new_user["username"] = "changed" assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED def test_retrieved_entity_state(user): - '''Retrieved entity has NOT_SET state.''' + """Retrieved entity has NOT_SET state.""" assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET def test_modified_entity_state(user): - '''Modified entity has MODIFIED state.''' - user['username'] = 'changed' + """Modified entity has MODIFIED state.""" + user["username"] = "changed" assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED def test_deleted_entity_state(session, user): - '''Deleted entity has DELETED state.''' + """Deleted entity has DELETED state.""" session.delete(user) assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED def test_post_commit_entity_state(session, unique_name): - '''Entity has NOT_SET state post commit.''' - new_user = session.create('User', {'username': unique_name}) + """Entity has NOT_SET state post commit.""" + new_user = session.create("User", {"username": unique_name}) assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED session.commit() @@ -58,21 +56,21 @@ def test_post_commit_entity_state(session, unique_name): def test_states(session, unique_name, user): - '''Determine correct states for multiple entities.''' + """Determine correct states for multiple entities.""" # NOT_SET - user_a = session.create('User', {'username': unique_name}) + user_a = session.create("User", {"username": unique_name}) session.commit() # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' + user_b = session.create("User", {"username": unique_name}) + user_b["username"] = "changed" # MODIFIED user_c = user - user_c['username'] = 'changed' + user_c["username"] = "changed" # DELETED - user_d = session.create('User', {'username': unique_name}) + user_d = session.create("User", {"username": unique_name}) session.delete(user_d) # Assert states. 
@@ -82,20 +80,20 @@ def test_states(session, unique_name, user): ftrack_api.symbol.NOT_SET, ftrack_api.symbol.CREATED, ftrack_api.symbol.MODIFIED, - ftrack_api.symbol.DELETED + ftrack_api.symbol.DELETED, ] def test_states_for_no_entities(): - '''Return empty list of states when no entities passed.''' + """Return empty list of states when no entities passed.""" states = ftrack_api.inspection.states([]) assert states == [] def test_skip_operations_for_non_inspected_entities(session, unique_name): - '''Skip operations for non inspected entities.''' - user_a = session.create('User', {'username': unique_name + '-1'}) - user_b = session.create('User', {'username': unique_name + '-2'}) + """Skip operations for non inspected entities.""" + user_a = session.create("User", {"username": unique_name + "-1"}) + user_b = session.create("User", {"username": unique_name + "-2"}) states = ftrack_api.inspection.states([user_a]) assert states == [ftrack_api.symbol.CREATED] diff --git a/test/unit/test_operation.py b/test/unit/test_operation.py index 702bfae3..ef793cd8 100644 --- a/test/unit/test_operation.py +++ b/test/unit/test_operation.py @@ -5,13 +5,13 @@ def test_operations_initialise(): - '''Initialise empty operations stack.''' + """Initialise empty operations stack.""" operations = ftrack_api.operation.Operations() assert len(operations) == 0 def test_operations_push(): - '''Push new operation onto stack.''' + """Push new operation onto stack.""" operations = ftrack_api.operation.Operations() assert len(operations) == 0 @@ -21,7 +21,7 @@ def test_operations_push(): def test_operations_pop(): - '''Pop and return operation from stack.''' + """Pop and return operation from stack.""" operations = ftrack_api.operation.Operations() assert len(operations) == 0 @@ -37,7 +37,7 @@ def test_operations_pop(): def test_operations_count(): - '''Count operations in stack.''' + """Count operations in stack.""" operations = ftrack_api.operation.Operations() assert len(operations) == 0 @@ -49,7 +49,7 @@ def test_operations_count(): def test_operations_clear(): - '''Clear operations stack.''' + """Clear operations stack.""" operations = ftrack_api.operation.Operations() operations.push(ftrack_api.operation.Operation()) operations.push(ftrack_api.operation.Operation()) @@ -61,7 +61,7 @@ def test_operations_clear(): def test_operations_iter(): - '''Iterate over operations stack.''' + """Iterate over operations stack.""" operations = ftrack_api.operation.Operations() operation_a = ftrack_api.operation.Operation() operation_b = ftrack_api.operation.Operation() @@ -72,8 +72,5 @@ def test_operations_iter(): operations.push(operation_c) assert len(operations) == 3 - for operation, expected in zip( - operations, [operation_a, operation_b, operation_c] - ): + for operation, expected in zip(operations, [operation_a, operation_b, operation_c]): assert operation is expected - diff --git a/test/unit/test_package.py b/test/unit/test_package.py index 247b496d..1f543b8e 100644 --- a/test/unit/test_package.py +++ b/test/unit/test_package.py @@ -5,44 +5,44 @@ class Class(object): - '''Class.''' + """Class.""" class Mixin(object): - '''Mixin.''' + """Mixin.""" def method(self): - '''Method.''' + """Method.""" return True def test_mixin(): - '''Mixin class to instance.''' + """Mixin class to instance.""" instance_a = Class() instance_b = Class() - assert not hasattr(instance_a, 'method') - assert not hasattr(instance_b, 'method') + assert not hasattr(instance_a, "method") + assert not hasattr(instance_b, "method") 
ftrack_api.mixin(instance_a, Mixin) - assert hasattr(instance_a, 'method') + assert hasattr(instance_a, "method") assert instance_a.method() is True - assert not hasattr(instance_b, 'method') + assert not hasattr(instance_b, "method") def test_mixin_same_class_multiple_times(): - '''Mixin class to instance multiple times.''' + """Mixin class to instance multiple times.""" instance = Class() - assert not hasattr(instance, 'method') + assert not hasattr(instance, "method") assert len(instance.__class__.mro()) == 2 ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') + assert hasattr(instance, "method") assert instance.method() is True assert len(instance.__class__.mro()) == 4 ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') + assert hasattr(instance, "method") assert instance.method() is True assert len(instance.__class__.mro()) == 4 diff --git a/test/unit/test_plugin.py b/test/unit/test_plugin.py index 84ba7966..7e94024d 100644 --- a/test/unit/test_plugin.py +++ b/test/unit/test_plugin.py @@ -13,95 +13,107 @@ @pytest.fixture() def valid_plugin(temporary_path): - '''Return path to directory containing a valid plugin.''' - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' + """Return path to directory containing a valid plugin.""" + with open(os.path.join(temporary_path, "plugin.py"), "w") as file_object: + file_object.write( + textwrap.dedent( + """ from __future__ import print_function def register(*args, **kw): print("Registered", args, kw) - ''')) + """ + ) + ) return temporary_path @pytest.fixture() def python_non_plugin(temporary_path): - '''Return path to directory containing Python file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' + """Return path to directory containing Python file that is non plugin.""" + with open(os.path.join(temporary_path, "non.py"), "w") as file_object: + file_object.write( + textwrap.dedent( + """ from __future__ import print_function print("Not a plugin") def not_called(): print("Not called") - ''')) + """ + ) + ) return temporary_path @pytest.fixture() def non_plugin(temporary_path): - '''Return path to directory containing file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: - file_object.write('Never seen') + """Return path to directory containing file that is non plugin.""" + with open(os.path.join(temporary_path, "non.txt"), "w") as file_object: + file_object.write("Never seen") return temporary_path @pytest.fixture() def broken_plugin(temporary_path): - '''Return path to directory containing broken plugin.''' - with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: - file_object.write('syntax error') + """Return path to directory containing broken plugin.""" + with open(os.path.join(temporary_path, "broken.py"), "w") as file_object: + file_object.write("syntax error") return temporary_path @pytest.fixture() def plugin(request, temporary_path): - '''Return path containing a plugin with requested specification.''' + """Return path containing a plugin with requested specification.""" specification = request.param - output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) - output = re.sub('\*args', 'args', output) - output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) + output = re.sub("(\w+)=\w+", '"\g<1>={}".format(\g<1>)', specification) + output = re.sub("\*args", 
"args", output) + output = re.sub("\*\*kwargs", "sorted(kwargs.items())", output) - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - content = textwrap.dedent(''' + with open(os.path.join(temporary_path, "plugin.py"), "w") as file_object: + content = textwrap.dedent( + """ from __future__ import print_function def register({}): print({}) - '''.format(specification, output)) + """.format( + specification, output + ) + ) file_object.write(content) return temporary_path def test_discover_empty_paths(capsys): - '''Discover no plugins when paths are empty.''' - ftrack_api.plugin.discover([' ']) + """Discover no plugins when paths are empty.""" + ftrack_api.plugin.discover([" "]) output, error = capsys.readouterr() assert not output assert not error def test_discover_valid_plugin(valid_plugin, capsys): - '''Discover valid plugin.''' - ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) + """Discover valid plugin.""" + ftrack_api.plugin.discover([valid_plugin], (1, 2), {"3": 4}) output, error = capsys.readouterr() - assert 'Registered (1, 2) {\'3\': 4}' in output + assert "Registered (1, 2) {'3': 4}" in output def test_discover_python_non_plugin(python_non_plugin, capsys): - '''Discover Python non plugin.''' + """Discover Python non plugin.""" ftrack_api.plugin.discover([python_non_plugin]) output, error = capsys.readouterr() - assert 'Not a plugin' in output - assert 'Not called' not in output + assert "Not a plugin" in output + assert "Not called" not in output def test_discover_non_plugin(non_plugin, capsys): - '''Discover non plugin.''' + """Discover non plugin.""" ftrack_api.plugin.discover([non_plugin]) output, error = capsys.readouterr() assert not output @@ -109,88 +121,99 @@ def test_discover_non_plugin(non_plugin, capsys): def test_discover_broken_plugin(broken_plugin, caplog): - '''Discover broken plugin.''' + """Discover broken plugin.""" caplog.set_level(logging.DEBUG) ftrack_api.plugin.discover([broken_plugin]) - records = caplog.get_records(when='call') + records = caplog.get_records(when="call") assert len(records) == 2 assert records[0].levelno is logging.WARNING assert records[1].levelno is logging.DEBUG - assert 'Failed to load plugin' in records[0].message - assert 'Traceback' in records[1].message + assert "Failed to load plugin" in records[0].message + assert "Traceback" in records[1].message @pytest.mark.parametrize( - 'plugin, positional, keyword, expected', + "plugin, positional, keyword, expected", [ pytest.param( - 'a, b=False, c=False, d=False', - (1, 2), {'c': True, 'd': True, 'e': True}, - '1 b=2 c=True d=True', id='mixed-explicit' + "a, b=False, c=False, d=False", + (1, 2), + {"c": True, "d": True, "e": True}, + "1 b=2 c=True d=True", + id="mixed-explicit", ), pytest.param( - '*args', - (1, 2), {'b': True, 'c': False}, - '(1, 2)', id='variable-args-only' + "*args", (1, 2), {"b": True, "c": False}, "(1, 2)", id="variable-args-only" ), pytest.param( - '**kwargs', - tuple(), {'b': True, 'c': False}, - '[(\'b\', True), (\'c\', False)]', id='variable-kwargs-only' + "**kwargs", + tuple(), + {"b": True, "c": False}, + "[('b', True), ('c', False)]", + id="variable-kwargs-only", ), pytest.param( - 'a=False, b=False', - (True,), {'b': True}, - 'a=True b=True', id='keyword-from-positional' + "a=False, b=False", + (True,), + {"b": True}, + "a=True b=True", + id="keyword-from-positional", ), pytest.param( - 'a, c=False, *args', - (1, 2, 3, 4), {}, - '1 c=2 (3, 4)', id='trailing-variable-args' + "a, c=False, *args", + (1, 2, 3, 4), + {}, + "1 
c=2 (3, 4)", + id="trailing-variable-args", ), pytest.param( - 'a, c=False, **kwargs', - tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, - '1 c=3 [(\'b\', 2), (\'d\', 4)]', id='trailing-keyword-args' + "a, c=False, **kwargs", + tuple(), + {"a": 1, "b": 2, "c": 3, "d": 4}, + "1 c=3 [('b', 2), ('d', 4)]", + id="trailing-keyword-args", ), ], - indirect=['plugin'], + indirect=["plugin"], ) def test_discover_plugin_with_specific_signature( plugin, positional, keyword, expected, capsys ): - '''Discover plugin passing only supported arguments.''' - ftrack_api.plugin.discover( - [plugin], positional, keyword - ) + """Discover plugin passing only supported arguments.""" + ftrack_api.plugin.discover([plugin], positional, keyword) output, error = capsys.readouterr() assert expected in output def test_discover_plugin_varying_signatures(temporary_path, capsys): - '''Discover multiple plugins with varying signatures.''' + """Discover multiple plugins with varying signatures.""" - with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' + with open(os.path.join(temporary_path, "plugin_a.py"), "w") as file_object: + file_object.write( + textwrap.dedent( + """ from __future__ import print_function def register(a): print((a,)) - ''')) - - with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' + """ + ) + ) + + with open(os.path.join(temporary_path, "plugin_b.py"), "w") as file_object: + file_object.write( + textwrap.dedent( + """ from __future__ import print_function def register(a, b=False): print((a,), {'b': b}) - ''')) - - ftrack_api.plugin.discover( - [temporary_path], (True,), {'b': True} - ) + """ + ) + ) + ftrack_api.plugin.discover([temporary_path], (True,), {"b": True}) output, error = capsys.readouterr() - assert '(True,)' in output - assert '(True,) {\'b\': True}' in output + assert "(True,)" in output + assert "(True,) {'b': True}" in output diff --git a/test/unit/test_query.py b/test/unit/test_query.py index de7f93f2..0f787bc4 100644 --- a/test/unit/test_query.py +++ b/test/unit/test_query.py @@ -12,27 +12,27 @@ def test_index(session): - '''Index into query result.''' - results = session.query('User') - assert isinstance(results[2], session.types['User']) + """Index into query result.""" + results = session.query("User") + assert isinstance(results[2], session.types["User"]) def test_len(session): - '''Return count of results using len.''' - results = session.query('User where username is jenkins') + """Return count of results using len.""" + results = session.query("User where username is jenkins") assert len(results) == 1 def test_all(session): - '''Return all results using convenience method.''' - results = session.query('User').all() + """Return all results using convenience method.""" + results = session.query("User").all() assert isinstance(results, list) assert len(results) def test_implicit_iteration(session): - '''Implicitly iterate through query result.''' - results = session.query('User') + """Implicitly iterate through query result.""" + results = session.query("User") assert isinstance(results, ftrack_api.query.QueryResult) records = [] @@ -43,102 +43,100 @@ def test_implicit_iteration(session): def test_one(session): - '''Return single result using convenience method.''' - user = session.query('User where username is jenkins').one() - assert user['username'] == 'jenkins' + """Return single result using convenience method.""" + user = session.query("User where 
username is jenkins").one() + assert user["username"] == "jenkins" def test_one_fails_for_no_results(session): - '''Fail to fetch single result when no results available.''' + """Fail to fetch single result when no results available.""" with pytest.raises(ftrack_api.exception.NoResultFoundError): - session.query('User where username is does_not_exist').one() + session.query("User where username is does_not_exist").one() def test_one_fails_for_multiple_results(session): - '''Fail to fetch single result when multiple results available.''' + """Fail to fetch single result when multiple results available.""" with pytest.raises(ftrack_api.exception.MultipleResultsFoundError): - session.query('User').one() + session.query("User").one() def test_one_with_existing_limit(session): - '''Fail to return single result when existing limit in expression.''' + """Fail to return single result when existing limit in expression.""" with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').one() + session.query("User where username is jenkins limit 0").one() def test_one_with_existing_offset(session): - '''Fail to return single result when existing offset in expression.''' + """Fail to return single result when existing offset in expression.""" with pytest.raises(ValueError): - session.query('User where username is jenkins offset 2').one() + session.query("User where username is jenkins offset 2").one() def test_one_with_prefetched_data(session): - '''Return single result ignoring prefetched data.''' - query = session.query('User where username is jenkins') + """Return single result ignoring prefetched data.""" + query = session.query("User where username is jenkins") query.all() user = query.one() - assert user['username'] == 'jenkins' + assert user["username"] == "jenkins" def test_first(session): - '''Return first result using convenience method.''' - users = session.query('User').all() + """Return first result using convenience method.""" + users = session.query("User").all() - user = session.query('User').first() + user = session.query("User").first() assert user == users[0] def test_first_returns_none_when_no_results(session): - '''Return None when no results available.''' - user = session.query('User where username is does_not_exist').first() + """Return None when no results available.""" + user = session.query("User where username is does_not_exist").first() assert user is None def test_first_with_existing_limit(session): - '''Fail to return first result when existing limit in expression.''' + """Fail to return first result when existing limit in expression.""" with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').first() + session.query("User where username is jenkins limit 0").first() def test_first_with_existing_offset(session): - '''Return first result whilst respecting custom offset.''' - users = session.query('User').all() + """Return first result whilst respecting custom offset.""" + users = session.query("User").all() - user = session.query('User offset 2').first() + user = session.query("User offset 2").first() assert user == users[2] def test_first_with_prefetched_data(session): - '''Return first result ignoring prefetched data.''' - query = session.query('User where username is jenkins') + """Return first result ignoring prefetched data.""" + query = session.query("User where username is jenkins") query.all() user = query.first() - assert user['username'] == 'jenkins' + assert user["username"] == "jenkins" def 
test_paging(session, mocker): - '''Page through results.''' - mocker.patch.object(session, 'call', wraps=session.call) + """Page through results.""" + mocker.patch.object(session, "call", wraps=session.call) page_size = 5 - query = session.query('User limit 50', page_size=page_size) + query = session.query("User limit 50", page_size=page_size) records = query.all() - assert session.call.call_count == ( - math.ceil(len(records) / float(page_size)) - ) + assert session.call.call_count == (math.ceil(len(records) / float(page_size))) def test_paging_respects_offset_and_limit(session, mocker): - '''Page through results respecting offset and limit.''' - users = session.query('User').all() + """Page through results respecting offset and limit.""" + users = session.query("User").all() - mocker.patch.object(session, 'call', wraps=session.call) + mocker.patch.object(session, "call", wraps=session.call) page_size = 6 - query = session.query('User offset 2 limit 8', page_size=page_size) + query = session.query("User offset 2 limit 8", page_size=page_size) records = query.all() assert session.call.call_count == 2 @@ -147,19 +145,16 @@ def test_paging_respects_offset_and_limit(session, mocker): def test_paging_respects_limit_smaller_than_page_size(session, mocker): - '''Use initial limit when less than page size.''' - mocker.patch.object(session, 'call', wraps=session.call) + """Use initial limit when less than page size.""" + mocker.patch.object(session, "call", wraps=session.call) page_size = 100 - query = session.query('User limit 10', page_size=page_size) + query = session.query("User limit 10", page_size=page_size) records = query.all() assert session.call.call_count == 1 session.call.assert_called_once_with( - [{ - 'action': 'query', - 'expression': 'select id from User offset 0 limit 10' - }] + [{"action": "query", "expression": "select id from User offset 0 limit 10"}] ) - assert len(records) == 10 \ No newline at end of file + assert len(records) == 10 diff --git a/test/unit/test_session.py b/test/unit/test_session.py index 6ea5d9f0..8735aab9 100644 --- a/test/unit/test_session.py +++ b/test/unit/test_session.py @@ -26,32 +26,32 @@ import ftrack_api.collection -@pytest.fixture(params=['memory', 'persisted']) +@pytest.fixture(params=["memory", "persisted"]) def cache(request): - '''Return cache.''' - if request.param == 'memory': + """Return cache.""" + if request.param == "memory": cache = None # There is already a default Memory cache present. - elif request.param == 'persisted': + elif request.param == "persisted": cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + tempfile.gettempdir(), "{0}.dbm".format(uuid.uuid4().hex) ) cache = lambda session: ftrack_api.cache.SerialisedCache( ftrack_api.cache.FileCache(cache_path), encode=functools.partial( - session.encode, entity_attribute_strategy='persisted_only' + session.encode, entity_attribute_strategy="persisted_only" ), - decode=session.decode + decode=session.decode, ) def cleanup(): - '''Cleanup.''' + """Cleanup.""" try: os.remove(cache_path) except OSError: # BSD DB (Mac OSX) implementation of the interface will append # a .db extension. 
- os.remove(cache_path + '.db') + os.remove(cache_path + ".db") request.addfinalizer(cleanup) @@ -60,17 +60,17 @@ def cleanup(): @pytest.fixture() def temporary_invalid_schema_cache(request): - '''Return schema cache path to invalid schema cache file.''' + """Return schema cache path to invalid schema cache file.""" schema_cache_path = os.path.join( tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + "ftrack_api_schema_cache_test_{0}.json".format(uuid.uuid4().hex), ) - with open(schema_cache_path, 'w') as file_: - file_.write('${invalid json}') + with open(schema_cache_path, "w") as file_: + file_.write("${invalid json}") def cleanup(): - '''Cleanup.''' + """Cleanup.""" os.remove(schema_cache_path) request.addfinalizer(cleanup) @@ -80,17 +80,17 @@ def cleanup(): @pytest.fixture() def temporary_valid_schema_cache(request, mocked_schemas): - '''Return schema cache path to valid schema cache file.''' + """Return schema cache path to valid schema cache file.""" schema_cache_path = os.path.join( tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + "ftrack_api_schema_cache_test_{0}.json".format(uuid.uuid4().hex), ) - with open(schema_cache_path, 'w') as file_: + with open(schema_cache_path, "w") as file_: json.dump(mocked_schemas, file_, indent=4) def cleanup(): - '''Cleanup.''' + """Cleanup.""" os.remove(schema_cache_path) request.addfinalizer(cleanup) @@ -99,197 +99,183 @@ def cleanup(): class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that should not cache newly created entities.''' + """Proxy cache that should not cache newly created entities.""" def set(self, key, value): - '''Set *value* for *key*.''' + """Set *value* for *key*.""" if isinstance(value, ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): + if ftrack_api.inspection.state(value) is ftrack_api.symbol.CREATED: return super(SelectiveCache, self).set(key, value) def test_get_entity(session, user): - '''Retrieve an entity by type and id.''' + """Retrieve an entity by type and id.""" matching = session.get(*ftrack_api.inspection.identity(user)) assert matching == user def test_get_non_existant_entity(session): - '''Retrieve a non-existant entity by type and id.''' - matching = session.get('User', 'non-existant-id') + """Retrieve a non-existant entity by type and id.""" + matching = session.get("User", "non-existant-id") assert matching is None def test_get_entity_of_invalid_type(session): - '''Fail to retrieve an entity using an invalid type.''' + """Fail to retrieve an entity using an invalid type.""" with pytest.raises(KeyError): - session.get('InvalidType', 'id') + session.get("InvalidType", "id") def test_create(session): - '''Create entity.''' - user = session.create('User', {'username': 'martin'}) + """Create entity.""" + user = session.create("User", {"username": "martin"}) with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] == 'martin' - assert user['email'] is ftrack_api.symbol.NOT_SET + assert user["id"] is not ftrack_api.symbol.NOT_SET + assert user["username"] == "martin" + assert user["email"] is ftrack_api.symbol.NOT_SET def test_create_using_only_defaults(session): - '''Create entity using defaults only.''' - user = session.create('User') + """Create entity using defaults only.""" + user = session.create("User") with session.auto_populating(False): - assert user['id'] is not 
ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET + assert user["id"] is not ftrack_api.symbol.NOT_SET + assert user["username"] is ftrack_api.symbol.NOT_SET def test_create_using_server_side_defaults(session): - '''Create entity using server side defaults.''' - user = session.create('User') + """Create entity using server side defaults.""" + user = session.create("User") with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET + assert user["id"] is not ftrack_api.symbol.NOT_SET + assert user["username"] is ftrack_api.symbol.NOT_SET session.commit() - assert user['username'] is not ftrack_api.symbol.NOT_SET + assert user["username"] is not ftrack_api.symbol.NOT_SET def test_create_overriding_defaults(session): - '''Create entity overriding defaults.''' + """Create entity overriding defaults.""" uid = str(uuid.uuid4()) - user = session.create('User', {'id': uid}) + user = session.create("User", {"id": uid}) with session.auto_populating(False): - assert user['id'] == uid + assert user["id"] == uid def test_create_with_reference(session): - '''Create entity with a reference to another.''' - status = session.query('Status')[0] - task = session.create('Task', {'status': status}) - assert task['status'] is status + """Create entity with a reference to another.""" + status = session.query("Status")[0] + task = session.create("Task", {"status": status}) + assert task["status"] is status def test_ensure_new_entity(session, unique_name): - '''Ensure entity, creating first.''' - entity = session.ensure('User', {'username': unique_name}) - assert entity['username'] == unique_name + """Ensure entity, creating first.""" + entity = session.ensure("User", {"username": unique_name}) + assert entity["username"] == unique_name def test_ensure_entity_with_non_string_data_types(session, mocker): - '''Ensure entity against non-string data types, creating first.''' + """Ensure entity against non-string data types, creating first.""" datetime = arrow.get() - task = session.query('Task').first() + task = session.query("Task").first() # if session.api_user contained '@', we'd need to work some more on queries. 
- user = session.query( - 'User where username is {}'.format(session.api_user) - ).first() + user = session.query("User where username is {}".format(session.api_user)).first() first = session.ensure( - 'Timelog', + "Timelog", { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } + "start": datetime, + "duration": 10, + "user_id": user["id"], + "context_id": task["id"], + }, ) - mocker.patch.object(session, 'create') + mocker.patch.object(session, "create") session.ensure( - 'Timelog', + "Timelog", { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } + "start": datetime, + "duration": 10, + "user_id": user["id"], + "context_id": task["id"], + }, ) - assert first['start'] == datetime - assert first['duration'] == 10 + assert first["start"] == datetime + assert first["duration"] == 10 def test_ensure_entity_with_identifying_keys(session, unique_name): - '''Ensure entity, checking using keys subset and then creating.''' + """Ensure entity, checking using keys subset and then creating.""" - unique_mail = 'test{0}@example.com'.format( - str(uuid.uuid4()) - ) + unique_mail = "test{0}@example.com".format(str(uuid.uuid4())) entity = session.ensure( - 'User', {'username': unique_name, 'email': unique_mail}, - identifying_keys=['username'] + "User", + {"username": unique_name, "email": unique_mail}, + identifying_keys=["username"], ) - assert entity['username'] == unique_name + assert entity["username"] == unique_name def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): - '''Fail to ensure entity when identifying key missing from data.''' + """Fail to ensure entity when identifying key missing from data.""" with pytest.raises(KeyError): session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['invalid'] + "User", + {"username": unique_name, "email": "test@example.com"}, + identifying_keys=["invalid"], ) def test_ensure_entity_with_missing_identifying_keys(session): - '''Fail to ensure entity when no identifying keys determined.''' + """Fail to ensure entity when no identifying keys determined.""" with pytest.raises(ValueError): - session.ensure('User', {}) + session.ensure("User", {}) def test_ensure_existing_entity(session, unique_name): - '''Ensure existing entity.''' - entity = session.ensure('User', {'first_name': unique_name}) + """Ensure existing entity.""" + entity = session.ensure("User", {"first_name": unique_name}) # Second call should not commit any new entity, just retrieve the existing. - mock.patch.object(session, 'create') - retrieved = session.ensure('User', {'first_name': unique_name}) + mock.patch.object(session, "create") + retrieved = session.ensure("User", {"first_name": unique_name}) assert retrieved == entity def test_ensure_update_existing_entity(session, unique_name): - '''Ensure and update existing entity.''' + """Ensure and update existing entity.""" - mail = 'test{0}@example.com'.format( - str(uuid.uuid4()) - ) + mail = "test{0}@example.com".format(str(uuid.uuid4())) - entity = session.ensure( - 'User', {'first_name': unique_name, 'email': mail} - ) - assert entity['email'] == mail + entity = session.ensure("User", {"first_name": unique_name, "email": mail}) + assert entity["email"] == mail - updated_mail = 'test{0}@example.com'.format( - str(uuid.uuid4()) - ) + updated_mail = "test{0}@example.com".format(str(uuid.uuid4())) # Second call should commit updates. 
retrieved = session.ensure( - 'User', {'first_name': unique_name, 'email': updated_mail}, - identifying_keys=['first_name'] + "User", + {"first_name": unique_name, "email": updated_mail}, + identifying_keys=["first_name"], ) assert retrieved == entity - assert retrieved['email'] == updated_mail + assert retrieved["email"] == updated_mail def test_reconstruct_entity(session): - '''Reconstruct entity.''' + """Reconstruct entity.""" uid = str(uuid.uuid4()) - data = { - 'id': uid, - 'username': 'martin', - 'email': 'martin@example.com' - } - user = session.create('User', data, reconstructing=True) + data = {"id": uid, "username": "martin", "email": "martin@example.com"} + user = session.create("User", data, reconstructing=True) for attribute in user.attributes: # No local attributes should be set. @@ -304,17 +290,17 @@ def test_reconstruct_entity(session): def test_reconstruct_entity_does_not_apply_defaults(session): - '''Reconstruct entity does not apply defaults.''' + """Reconstruct entity does not apply defaults.""" # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) + user = session._create("User", {}, reconstructing=True) with session.auto_populating(False): - assert user['id'] is ftrack_api.symbol.NOT_SET + assert user["id"] is ftrack_api.symbol.NOT_SET def test_reconstruct_empty_entity(session): - '''Reconstruct empty entity.''' + """Reconstruct empty entity.""" # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) + user = session._create("User", {}, reconstructing=True) for attribute in user.attributes: # No local attributes should be set. @@ -325,19 +311,19 @@ def test_reconstruct_empty_entity(session): def test_delete_operation_ordering(session, unique_name): - '''Delete entities in valid order.''' + """Delete entities in valid order.""" # Construct entities. 
- project_schema = session.query('ProjectSchema').first() - project = session.create('Project', { - 'name': unique_name, - 'full_name': unique_name, - 'project_schema': project_schema - }) - - sequence = session.create('Sequence', { - 'name': unique_name, - 'parent': project - }) + project_schema = session.query("ProjectSchema").first() + project = session.create( + "Project", + { + "name": unique_name, + "full_name": unique_name, + "project_schema": project_schema, + }, + ) + + sequence = session.create("Sequence", {"name": unique_name, "parent": project}) session.commit() @@ -349,65 +335,61 @@ def test_delete_operation_ordering(session, unique_name): def test_create_then_delete_operation_ordering(session, unique_name): - '''Create and delete entity in one transaction.''' - entity = session.create('User', {'username': unique_name}) + """Create and delete entity in one transaction.""" + entity = session.create("User", {"username": unique_name}) session.delete(entity) session.commit() def test_create_and_modify_to_have_required_attribute(session, unique_name): - '''Create and modify entity to have required attribute in transaction.''' - entity = session.create('Scope', {}) - other = session.create('Scope', {'name': unique_name}) - entity['name'] = '{0}2'.format(unique_name) + """Create and modify entity to have required attribute in transaction.""" + entity = session.create("Scope", {}) + other = session.create("Scope", {"name": unique_name}) + entity["name"] = "{0}2".format(unique_name) session.commit() def test_ignore_in_create_entity_payload_values_set_to_not_set( mocker, unique_name, session ): - '''Ignore in commit, created entity data set to NOT_SET''' - mocked = mocker.patch.object(session, 'call') + """Ignore in commit, created entity data set to NOT_SET""" + mocked = mocker.patch.object(session, "call") # Should ignore 'email' attribute in payload. - new_user = session.create( - 'User', {'username': unique_name, 'email': 'test'} - ) - new_user['email'] = ftrack_api.symbol.NOT_SET + new_user = session.create("User", {"username": unique_name, "email": "test"}) + new_user["email"] = ftrack_api.symbol.NOT_SET session.commit() payloads = mocked.call_args[0][0] assert len(payloads) == 1 -def test_ignore_operation_that_modifies_attribute_to_not_set( - mocker, session, user -): - '''Ignore in commit, operation that sets attribute value to NOT_SET''' - mocked = mocker.patch.object(session, 'call') +def test_ignore_operation_that_modifies_attribute_to_not_set(mocker, session, user): + """Ignore in commit, operation that sets attribute value to NOT_SET""" + mocked = mocker.patch.object(session, "call") # Should result in no call to server. 
- user['email'] = ftrack_api.symbol.NOT_SET + user["email"] = ftrack_api.symbol.NOT_SET session.commit() assert not mocked.called def test_operation_optimisation_on_commit(session, mocker): - '''Optimise operations on commit.''' - mocked = mocker.patch.object(session, 'call') + """Optimise operations on commit.""" + mocked = mocker.patch.object(session, "call") - user_a = session.create('User', {'username': 'bob'}) - user_a['username'] = 'foo' - user_a['email'] = 'bob@example.com' + user_a = session.create("User", {"username": "bob"}) + user_a["username"] = "foo" + user_a["email"] = "bob@example.com" - user_b = session.create('User', {'username': 'martin'}) - user_b['email'] = 'martin@ftrack.com' + user_b = session.create("User", {"username": "martin"}) + user_b["email"] = "martin@ftrack.com" - user_a['email'] = 'bob@example.com' - user_a['first_name'] = 'Bob' + user_a["email"] = "bob@example.com" + user_a["first_name"] = "Bob" - user_c = session.create('User', {'username': 'neverexist'}) - user_c['email'] = 'ignore@example.com' + user_c = session.create("User", {"username": "neverexist"}) + user_c["email"] = "ignore@example.com" session.delete(user_c) user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() @@ -420,41 +402,41 @@ def test_operation_optimisation_on_commit(session, mocker): payloads = mocked.call_args[0][0] assert len(payloads) == 3 - assert payloads[0]['action'] == 'create' - assert payloads[0]['entity_key'] == list(user_a_entity_key) - assert set(list(payloads[0]['entity_data'].keys())) == set([ - '__entity_type__', 'id', 'resource_type', 'username' - ]) + assert payloads[0]["action"] == "create" + assert payloads[0]["entity_key"] == list(user_a_entity_key) + assert set(list(payloads[0]["entity_data"].keys())) == set( + ["__entity_type__", "id", "resource_type", "username"] + ) - assert payloads[1]['action'] == 'create' - assert payloads[1]['entity_key'] == list(user_b_entity_key) - assert set(list(payloads[1]['entity_data'].keys())) == set([ - '__entity_type__', 'id', 'resource_type', 'username', 'email' - ]) + assert payloads[1]["action"] == "create" + assert payloads[1]["entity_key"] == list(user_b_entity_key) + assert set(list(payloads[1]["entity_data"].keys())) == set( + ["__entity_type__", "id", "resource_type", "username", "email"] + ) - assert payloads[2]['action'] == 'update' - assert payloads[2]['entity_key'] == list(user_a_entity_key) - assert set(list(payloads[2]['entity_data'].keys())) == set([ - '__entity_type__', 'email', 'first_name' - ]) + assert payloads[2]["action"] == "update" + assert payloads[2]["entity_key"] == list(user_a_entity_key) + assert set(list(payloads[2]["entity_data"].keys())) == set( + ["__entity_type__", "email", "first_name"] + ) def test_state_collection(session, unique_name, user): - '''Session state collection holds correct entities.''' + """Session state collection holds correct entities.""" # NOT_SET - user_a = session.create('User', {'username': unique_name}) + user_a = session.create("User", {"username": unique_name}) session.commit() # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' + user_b = session.create("User", {"username": unique_name}) + user_b["username"] = "changed" # MODIFIED user_c = user - user_c['username'] = 'changed' + user_c["username"] = "changed" # DELETED - user_d = session.create('User', {'username': unique_name}) + user_d = session.create("User", {"username": unique_name}) session.delete(user_d) assert session.created == [user_b] @@ -463,31 
+445,39 @@ def test_state_collection(session, unique_name, user):


def test_get_entity_with_composite_primary_key(session, new_project):
-    '''Retrieve entity that uses a composite primary key.'''
-    entity = session.create('Metadata', {
-        'key': 'key', 'value': 'value',
-        'parent_type': new_project.entity_type,
-        'parent_id': new_project['id']
-    })
+    """Retrieve entity that uses a composite primary key."""
+    entity = session.create(
+        "Metadata",
+        {
+            "key": "key",
+            "value": "value",
+            "parent_type": new_project.entity_type,
+            "parent_id": new_project["id"],
+        },
+    )

    session.commit()

    # Avoid cache.
    new_session = ftrack_api.Session()
    retrieved_entity = new_session.get(
-        'Metadata', ftrack_api.inspection.primary_key(entity).values()
+        "Metadata", ftrack_api.inspection.primary_key(entity).values()
    )

    assert retrieved_entity == entity


def test_get_entity_with_incomplete_composite_primary_key(session, new_project):
-    '''Fail to retrieve entity using incomplete composite primary key.'''
-    entity = session.create('Metadata', {
-        'key': 'key', 'value': 'value',
-        'parent_type': new_project.entity_type,
-        'parent_id': new_project['id']
-    })
+    """Fail to retrieve entity using an incomplete composite primary key."""
+    entity = session.create(
+        "Metadata",
+        {
+            "key": "key",
+            "value": "value",
+            "parent_type": new_project.entity_type,
+            "parent_id": new_project["id"],
+        },
+    )

    session.commit()
@@ -495,83 +485,84 @@ def test_get_entity_with_incomplete_composite_primary_key(session, new_project):
    new_session = ftrack_api.Session()
    with pytest.raises(ValueError):
        new_session.get(
-            'Metadata', list(ftrack_api.inspection.primary_key(entity).values())[0]
+            "Metadata", list(ftrack_api.inspection.primary_key(entity).values())[0]
        )


def test_populate_entity(session, new_user):
-    '''Populate entity that uses single primary key.'''
+    """Populate entity that uses a single primary key."""
    with session.auto_populating(False):
-        assert new_user['email'] is ftrack_api.symbol.NOT_SET
+        assert new_user["email"] is ftrack_api.symbol.NOT_SET

-    session.populate(new_user, 'email')
-    assert new_user['email'] is not ftrack_api.symbol.NOT_SET
+    session.populate(new_user, "email")
+    assert new_user["email"] is not ftrack_api.symbol.NOT_SET


def test_populate_entities(session, unique_name):
-    '''Populate multiple entities that use single primary key.'''
+    """Populate multiple entities that use a single primary key."""
    users = []
    for index in range(3):
        users.append(
-            session.create(
-                'User', {'username': '{0}-{1}'.format(unique_name, index)}
-            )
+            session.create("User", {"username": "{0}-{1}".format(unique_name, index)})
        )

    session.commit()

    with session.auto_populating(False):
        for user in users:
-            assert user['email'] is ftrack_api.symbol.NOT_SET
+            assert user["email"] is ftrack_api.symbol.NOT_SET

-    session.populate(users, 'email')
+    session.populate(users, "email")

    for user in users:
-        assert user['email'] is not ftrack_api.symbol.NOT_SET
+        assert user["email"] is not ftrack_api.symbol.NOT_SET


def test_populate_entity_with_composite_primary_key(session, new_project):
-    '''Populate entity that uses a composite primary key.'''
-    entity = session.create('Metadata', {
-        'key': 'key', 'value': 'value',
-        'parent_type': new_project.entity_type,
-        'parent_id': new_project['id']
-    })
+    """Populate entity that uses a composite primary key."""
+    entity = session.create(
+        "Metadata",
+        {
+            "key": "key",
+            "value": "value",
+            "parent_type": new_project.entity_type,
+            "parent_id": new_project["id"],
+        },
+    )

    session.commit()

    # Avoid cache.
new_session = ftrack_api.Session() retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() + "Metadata", ftrack_api.inspection.primary_key(entity).values() ) # Manually change already populated remote value so can test it gets reset # on populate call. - retrieved_entity.attributes.get('value').set_remote_value( - retrieved_entity, 'changed' + retrieved_entity.attributes.get("value").set_remote_value( + retrieved_entity, "changed" ) - new_session.populate(retrieved_entity, 'value') - assert retrieved_entity['value'] == 'value' - - -@pytest.mark.parametrize('server_information, compatible', [ - pytest.param({}, False, id='No information'), - pytest.param({'version': '3.3.11'}, True, id='Valid current version'), - pytest.param({'version': '3.3.12'}, True, id='Valid higher version'), - pytest.param({'version': '3.4'}, True, id='Valid higher version'), - pytest.param({'version': '3.4.1'}, True, id='Valid higher version'), - pytest.param({'version': '3.5.16'}, True, id='Valid higher version'), - pytest.param({'version': '3.3.10'}, False, id='Invalid lower version'), -]) -def test_check_server_compatibility( - server_information, compatible, session -): - '''Check server compatibility.''' - with mock.patch.dict( - session._server_information, server_information, clear=True - ): + new_session.populate(retrieved_entity, "value") + assert retrieved_entity["value"] == "value" + + +@pytest.mark.parametrize( + "server_information, compatible", + [ + pytest.param({}, False, id="No information"), + pytest.param({"version": "3.3.11"}, True, id="Valid current version"), + pytest.param({"version": "3.3.12"}, True, id="Valid higher version"), + pytest.param({"version": "3.4"}, True, id="Valid higher version"), + pytest.param({"version": "3.4.1"}, True, id="Valid higher version"), + pytest.param({"version": "3.5.16"}, True, id="Valid higher version"), + pytest.param({"version": "3.3.10"}, False, id="Invalid lower version"), + ], +) +def test_check_server_compatibility(server_information, compatible, session): + """Check server compatibility.""" + with mock.patch.dict(session._server_information, server_information, clear=True): if compatible: session.check_server_compatibility() else: @@ -580,33 +571,30 @@ def test_check_server_compatibility( def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): - '''Encode entity using "all" entity_attribute_strategy.''' + """Encode entity using "all" entity_attribute_strategy.""" new_bar = mocked_schema_session.create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id' - } + "Bar", {"name": "myBar", "id": "bar_unique_id"} ) new_foo = mocked_schema_session.create( - 'Foo', + "Foo", { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42, - 'number': 12345678.9, - 'boolean': False, - 'date': arrow.get('2015-11-18 15:24:09'), - 'bars': [new_bar] - } + "id": "a_unique_id", + "string": "abc", + "integer": 42, + "number": 12345678.9, + "boolean": False, + "date": arrow.get("2015-11-18 15:24:09"), + "bars": [new_bar], + }, ) - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='all' - ) + encoded = mocked_schema_session.encode(new_foo, entity_attribute_strategy="all") - assert encoded == textwrap.dedent(''' + assert ( + encoded + == textwrap.dedent( + """ {"__entity_type__": "Foo", "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], "boolean": false, @@ -615,126 +603,133 @@ def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): "integer": 42, 
"number": 12345678.9, "string": "abc"} - ''').replace('\n', '') + """ + ).replace("\n", "") + ) -def test_encode_entity_using_only_set_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "set_only" entity_attribute_strategy.''' +def test_encode_entity_using_only_set_attributes_strategy(mocked_schema_session): + """Encode entity using "set_only" entity_attribute_strategy.""" new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - } + "Foo", {"id": "a_unique_id", "string": "abc", "integer": 42} ) encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='set_only' + new_foo, entity_attribute_strategy="set_only" ) - assert encoded == textwrap.dedent(''' + assert ( + encoded + == textwrap.dedent( + """ {"__entity_type__": "Foo", "id": "a_unique_id", "integer": 42, "string": "abc"} - ''').replace('\n', '') + """ + ).replace("\n", "") + ) def test_encode_computed_attribute_using_persisted_only_attributes_strategy( - mocked_schema_session + mocked_schema_session, ): - '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' + """Encode computed attribute, "persisted_only" entity_attribute_strategy.""" new_bar = mocked_schema_session._create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id', - 'computed_value': 'FOO' - }, - reconstructing=True + "Bar", + {"name": "myBar", "id": "bar_unique_id", "computed_value": "FOO"}, + reconstructing=True, ) encoded = mocked_schema_session.encode( - new_bar, entity_attribute_strategy='persisted_only' + new_bar, entity_attribute_strategy="persisted_only" ) - assert encoded == textwrap.dedent(''' + assert ( + encoded + == textwrap.dedent( + """ {"__entity_type__": "Bar", "id": "bar_unique_id", "name": "myBar"} - ''').replace('\n', '') + """ + ).replace("\n", "") + ) -def test_encode_entity_using_only_modified_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "modified_only" entity_attribute_strategy.''' +def test_encode_entity_using_only_modified_attributes_strategy(mocked_schema_session): + """Encode entity using "modified_only" entity_attribute_strategy.""" new_foo = mocked_schema_session._create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - }, - reconstructing=True + "Foo", + {"id": "a_unique_id", "string": "abc", "integer": 42}, + reconstructing=True, ) - new_foo['string'] = 'Modified' + new_foo["string"] = "Modified" encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='modified_only' + new_foo, entity_attribute_strategy="modified_only" ) - assert encoded == textwrap.dedent(''' + assert ( + encoded + == textwrap.dedent( + """ {"__entity_type__": "Foo", "id": "a_unique_id", "string": "Modified"} - ''').replace('\n', '') + """ + ).replace("\n", "") + ) def test_encode_entity_using_invalid_strategy(session, new_task): - '''Fail to encode entity using invalid strategy.''' + """Fail to encode entity using invalid strategy.""" with pytest.raises(ValueError): - session.encode(new_task, entity_attribute_strategy='invalid') + session.encode(new_task, entity_attribute_strategy="invalid") def test_encode_operation_payload(session): - '''Encode operation payload.''' + """Encode operation payload.""" sequence_component = session.create_component( "/path/to/sequence.%d.jpg [1]", location=None ) file_component = sequence_component["members"][0] - encoded = session.encode([ - ftrack_api.session.OperationPayload({ - 'action': 'create', - 'entity_data': { - '__entity_type__': 
u'FileComponent', - u'container': sequence_component, - 'id': file_component['id'] - }, - 'entity_key': [file_component['id']], - 'entity_type': u'FileComponent' - }), - ftrack_api.session.OperationPayload({ - 'action': 'update', - 'entity_data': { - '__entity_type__': u'SequenceComponent', - u'members': ftrack_api.collection.Collection( - sequence_component, - sequence_component.attributes.get('members'), - data=[file_component] - ) - }, - 'entity_key': [sequence_component['id']], - 'entity_type': u'SequenceComponent' - }) - ]) + encoded = session.encode( + [ + ftrack_api.session.OperationPayload( + { + "action": "create", + "entity_data": { + "__entity_type__": "FileComponent", + "container": sequence_component, + "id": file_component["id"], + }, + "entity_key": [file_component["id"]], + "entity_type": "FileComponent", + } + ), + ftrack_api.session.OperationPayload( + { + "action": "update", + "entity_data": { + "__entity_type__": "SequenceComponent", + "members": ftrack_api.collection.Collection( + sequence_component, + sequence_component.attributes.get("members"), + data=[file_component], + ), + }, + "entity_key": [sequence_component["id"]], + "entity_type": "SequenceComponent", + } + ), + ] + ) - expected = textwrap.dedent(''' + expected = textwrap.dedent( + """ [{{"action": "create", "entity_data": {{"__entity_type__": "FileComponent", "container": {{"__entity_type__": "SequenceComponent", @@ -747,18 +742,17 @@ def test_encode_operation_payload(session): "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, "entity_key": ["{0[id]}"], "entity_type": "SequenceComponent"}}] - '''.format(sequence_component, file_component)).replace('\n', '') + """.format( + sequence_component, file_component + ) + ).replace("\n", "") assert encoded == expected -def test_decode_partial_entity( - session, new_task -): - '''Decode partially encoded entity.''' - encoded = session.encode( - new_task, entity_attribute_strategy='set_only' - ) +def test_decode_partial_entity(session, new_task): + """Decode partially encoded entity.""" + encoded = session.encode(new_task, entity_attribute_strategy="set_only") entity = session.decode(encoded) @@ -767,23 +761,23 @@ def test_decode_partial_entity( def test_reset(mocker): - '''Reset session.''' + """Reset session.""" plugin_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') + os.path.join(os.path.dirname(__file__), "..", "fixture", "plugin") ) session = ftrack_api.Session(plugin_paths=[plugin_path]) - assert hasattr(session.types.get('User'), 'stub') + assert hasattr(session.types.get("User"), "stub") location = session.query('Location where name is "test.location"').one() assert location.accessor is not ftrack_api.symbol.NOT_SET - mocked_close = mocker.patch.object(session._request, 'close') - mocked_fetch = mocker.patch.object(session, '_load_schemas') + mocked_close = mocker.patch.object(session._request, "close") + mocked_fetch = mocker.patch.object(session, "_load_schemas") session.reset() # Assert custom entity type maintained. - assert hasattr(session.types.get('User'), 'stub') + assert hasattr(session.types.get("User"), "stub") # Assert location plugin re-configured. 
location = session.query('Location where name is "test.location"').one() @@ -795,42 +789,42 @@ def test_reset(mocker): def test_rollback_scalar_attribute_change(session, new_user): - '''Rollback scalar attribute change via session.''' + """Rollback scalar attribute change via session.""" assert not session.recorded_operations - current_first_name = new_user['first_name'] + current_first_name = new_user["first_name"] - new_user['first_name'] = 'NewName' - assert new_user['first_name'] == 'NewName' + new_user["first_name"] = "NewName" + assert new_user["first_name"] == "NewName" assert session.recorded_operations session.rollback() assert not session.recorded_operations - assert new_user['first_name'] == current_first_name + assert new_user["first_name"] == current_first_name def test_rollback_collection_attribute_change(session, new_user): - '''Rollback collection attribute change via session.''' + """Rollback collection attribute change via session.""" assert not session.recorded_operations - current_timelogs = new_user['timelogs'] + current_timelogs = new_user["timelogs"] assert list(current_timelogs) == [] - timelog = session.create('Timelog', {}) - new_user['timelogs'].append(timelog) - assert list(new_user['timelogs']) == [timelog] + timelog = session.create("Timelog", {}) + new_user["timelogs"].append(timelog) + assert list(new_user["timelogs"]) == [timelog] assert session.recorded_operations session.rollback() assert not session.recorded_operations - assert list(new_user['timelogs']) == [] + assert list(new_user["timelogs"]) == [] def test_rollback_entity_creation(session): - '''Rollback entity creation via session.''' + """Rollback entity creation via session.""" assert not session.recorded_operations - new_user = session.create('User') + new_user = session.create("User") assert session.recorded_operations assert new_user in session.created @@ -842,7 +836,7 @@ def test_rollback_entity_creation(session): def test_rollback_entity_deletion(session, new_user): - '''Rollback entity deletion via session.''' + """Rollback entity deletion via session.""" assert not session.recorded_operations session.delete(new_user) @@ -858,9 +852,10 @@ def test_rollback_entity_deletion(session, new_user): # Caching # ------------------------------------------------------------------------------ + def test_get_entity_bypassing_cache(session, user, mocker): - '''Retrieve an entity by type and id bypassing cache.''' - mocker.patch.object(session, 'call', wraps=session.call) + """Retrieve an entity by type and id bypassing cache.""" + mocker.patch.object(session, "call", wraps=session.call) session.cache.remove( session.cache_key_maker.key(ftrack_api.inspection.identity(user)) @@ -879,19 +874,19 @@ def test_get_entity_bypassing_cache(session, user, mocker): def test_get_entity_from_cache(cache, task, mocker): - '''Retrieve an entity by type and id from cache.''' + """Retrieve an entity by type and id from cache.""" session = ftrack_api.Session(cache=cache) # Prepare cache. session.merge(task) # Disable server calls. - mocker.patch.object(session, 'call') + mocker.patch.object(session, "call") # Retrieve entity from cache. entity = session.get(*ftrack_api.inspection.identity(task)) - assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity is not None, "Failed to retrieve entity from cache." 
assert entity == task assert entity is not task @@ -900,37 +895,36 @@ def test_get_entity_from_cache(cache, task, mocker): def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): - '''Retrieve an entity tree from cache.''' + """Retrieve an entity tree from cache.""" session = ftrack_api.Session(cache=cache) # Prepare cache. # TODO: Maybe cache should be prepopulated for a better check here. session.query( - 'select children, children.children, children.children.children, ' - 'children.children.children.assignments, ' - 'children.children.children.assignments.resource ' - 'from Project where id is "{0}"' - .format(new_project_tree['id']) + "select children, children.children, children.children.children, " + "children.children.children.assignments, " + "children.children.children.assignments.resource " + 'from Project where id is "{0}"'.format(new_project_tree["id"]) ).one() # Disable server calls. - mocker.patch.object(session, 'call') + mocker.patch.object(session, "call") # Retrieve entity from cache. entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) - assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity is not None, "Failed to retrieve entity from cache." assert entity == new_project_tree assert entity is not new_project_tree # Check tree. with session.auto_populating(False): - for sequence in entity['children']: - for shot in sequence['children']: - for task in shot['children']: - assignments = task['assignments'] + for sequence in entity["children"]: + for shot in sequence["children"]: + for task in shot["children"]: + assignments = task["assignments"] for assignment in assignments: - resource = assignment['resource'] + resource = assignment["resource"] assert resource is not ftrack_api.symbol.NOT_SET @@ -939,39 +933,38 @@ def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): def test_get_metadata_from_cache(session, mocker, cache, new_task): - '''Retrieve an entity along with its metadata from cache.''' - new_task['metadata']['key'] = 'value' + """Retrieve an entity along with its metadata from cache.""" + new_task["metadata"]["key"] = "value" session.commit() fresh_session = ftrack_api.Session(cache=cache) # Prepare cache. fresh_session.query( - 'select metadata.key, metadata.value from ' - 'Task where id is "{0}"' - .format(new_task['id']) + "select metadata.key, metadata.value from " + 'Task where id is "{0}"'.format(new_task["id"]) ).all() # Disable server calls. - mocker.patch.object(fresh_session, 'call') + mocker.patch.object(fresh_session, "call") # Retrieve entity from cache. entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) - assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity is not None, "Failed to retrieve entity from cache." assert entity == new_task assert entity is not new_task # Check metadata cached correctly. with fresh_session.auto_populating(False): - metadata = entity['metadata'] - assert metadata['key'] == 'value' + metadata = entity["metadata"] + assert metadata["key"] == "value" assert not fresh_session.call.called def test_merge_circular_reference(cache, temporary_file): - '''Merge circular reference into cache.''' + """Merge circular reference into cache.""" session = ftrack_api.Session(cache=cache) # The following will test the condition as a FileComponent will be created # with corresponding ComponentLocation. 
The server will return the file @@ -985,14 +978,12 @@ def test_merge_circular_reference(cache, temporary_file): def test_create_with_selective_cache(session): - '''Create entity does not store entity in selective cache.''' + """Create entity does not store entity in selective cache.""" cache = ftrack_api.cache.MemoryCache() session.cache.caches.append(SelectiveCache(cache)) try: - user = session.create('User', {'username': 'martin'}) - cache_key = session.cache_key_maker.key( - ftrack_api.inspection.identity(user) - ) + user = session.create("User", {"username": "martin"}) + cache_key = session.cache_key_maker.key(ftrack_api.inspection.identity(user)) with pytest.raises(KeyError): cache.get(cache_key) @@ -1002,22 +993,18 @@ def test_create_with_selective_cache(session): def test_correct_file_type_on_sequence_component(session): - '''Create sequence component with correct file type.''' - path = '/path/to/image/sequence.%04d.dpx [1-10]' + """Create sequence component with correct file type.""" + path = "/path/to/image/sequence.%04d.dpx [1-10]" sequence_component = session.create_component(path) - assert sequence_component['file_type'] == '.dpx' + assert sequence_component["file_type"] == ".dpx" -def test_read_schemas_from_cache( - session, temporary_valid_schema_cache -): - '''Read valid content from schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' +def test_read_schemas_from_cache(session, temporary_valid_schema_cache): + """Read valid content from schema cache.""" + expected_hash = "a98d0627b5e33966e43e1cb89b082db7" - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) + schemas, hash_ = session._read_schemas_from_cache(temporary_valid_schema_cache) assert expected_hash == hash_ @@ -1025,49 +1012,39 @@ def test_read_schemas_from_cache( def test_fail_to_read_schemas_from_invalid_cache( session, temporary_invalid_schema_cache ): - '''Fail to read invalid content from schema cache.''' + """Fail to read invalid content from schema cache.""" with pytest.raises(ValueError): - session._read_schemas_from_cache( - temporary_invalid_schema_cache - ) + session._read_schemas_from_cache(temporary_invalid_schema_cache) -def test_write_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Write valid content to schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' +def test_write_schemas_to_cache(session, temporary_valid_schema_cache): + """Write valid content to schema cache.""" + expected_hash = "a98d0627b5e33966e43e1cb89b082db7" schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) + schemas, hash_ = session._read_schemas_from_cache(temporary_valid_schema_cache) assert expected_hash == hash_ -def test_fail_to_write_invalid_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Fail to write invalid content to schema cache.''' +def test_fail_to_write_invalid_schemas_to_cache(session, temporary_valid_schema_cache): + """Fail to write invalid content to schema cache.""" # Datetime not serialisable by default. 
invalid_content = datetime.datetime.now() with pytest.raises(TypeError): - session._write_schemas_to_cache( - invalid_content, temporary_valid_schema_cache - ) + session._write_schemas_to_cache(invalid_content, temporary_valid_schema_cache) def test_load_schemas_from_valid_cache( mocker, session, temporary_valid_schema_cache, mocked_schemas ): - '''Load schemas from cache.''' + """Load schemas from cache.""" expected_schemas = session._load_schemas(temporary_valid_schema_cache) - mocked = mocker.patch.object(session, 'call') + mocked = mocker.patch.object(session, "call") schemas = session._load_schemas(temporary_valid_schema_cache) assert schemas == expected_schemas @@ -1077,8 +1054,8 @@ def test_load_schemas_from_valid_cache( def test_load_schemas_from_server_when_cache_invalid( mocker, session, temporary_invalid_schema_cache ): - '''Load schemas from server when cache invalid.''' - mocked = mocker.patch.object(session, 'call', wraps=session.call) + """Load schemas from server when cache invalid.""" + mocked = mocker.patch.object(session, "call", wraps=session.call) session._load_schemas(temporary_invalid_schema_cache) assert mocked.called @@ -1087,14 +1064,12 @@ def test_load_schemas_from_server_when_cache_invalid( def test_load_schemas_from_server_when_cache_outdated( mocker, session, temporary_valid_schema_cache ): - '''Load schemas from server when cache outdated.''' + """Load schemas from server when cache outdated.""" schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) - schemas.append({ - 'id': 'NewTest' - }) + schemas.append({"id": "NewTest"}) session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - mocked = mocker.patch.object(session, 'call', wraps=session.call) + mocked = mocker.patch.object(session, "call", wraps=session.call) session._load_schemas(temporary_valid_schema_cache) assert mocked.called @@ -1103,35 +1078,30 @@ def test_load_schemas_from_server_when_cache_outdated( def test_load_schemas_from_server_not_reporting_schema_hash( mocker, session, temporary_valid_schema_cache ): - '''Load schemas from server when server does not report schema hash.''' + """Load schemas from server when server does not report schema hash.""" mocked_write = mocker.patch.object( - session, '_write_schemas_to_cache', - wraps=session._write_schemas_to_cache + session, "_write_schemas_to_cache", wraps=session._write_schemas_to_cache ) server_information = session._server_information.copy() - server_information.pop('schema_hash') - mocker.patch.object( - session, '_server_information', new=server_information - ) + server_information.pop("schema_hash") + mocker.patch.object(session, "_server_information", new=server_information) session._load_schemas(temporary_valid_schema_cache) # Cache still written even if hash not reported. assert mocked_write.called - mocked = mocker.patch.object(session, 'call', wraps=session.call) + mocked = mocker.patch.object(session, "call", wraps=session.call) session._load_schemas(temporary_valid_schema_cache) # No hash reported by server so cache should have been bypassed. 
    assert mocked.called


-def test_load_schemas_bypassing_cache(
-    mocker, session, temporary_valid_schema_cache
-):
-    '''Load schemas bypassing cache when set to False.'''
-    mocker.patch.object(session, 'call', wraps=session.call)
+def test_load_schemas_bypassing_cache(mocker, session, temporary_valid_schema_cache):
+    """Load schemas bypassing cache when set to False."""
+    mocker.patch.object(session, "call", wraps=session.call)

    session._load_schemas(temporary_valid_schema_cache)
    assert session.call.call_count == 1
@@ -1141,117 +1111,104 @@ def test_load_schemas_bypassing_cache(


def test_get_tasks_widget_url(session):
-    '''Tasks widget URL returns valid HTTP status.'''
-    url = session.get_widget_url('tasks')
+    """Tasks widget URL returns valid HTTP status."""
+    url = session.get_widget_url("tasks")

    response = requests.get(url)
    response.raise_for_status()


def test_get_info_widget_url(session, task):
-    '''Info widget URL for *task* returns valid HTTP status.'''
-    url = session.get_widget_url('info', entity=task, theme='light')
+    """Info widget URL for *task* returns valid HTTP status."""
+    url = session.get_widget_url("info", entity=task, theme="light")

    response = requests.get(url)
    response.raise_for_status()


@pytest.mark.xfail(
    raises=ftrack_api.exception.ServerError,
-    reason='Testing environment does not support encoding'
+    reason="Testing environment does not support encoding",
)
def test_encode_media_from_path(session, video_path):
-    '''Encode media based on a file path.'''
+    """Encode media based on a file path."""
    job = session.encode_media(video_path)
-    assert job.entity_type == 'Job'
+    assert job.entity_type == "Job"

-    job_data = json.loads(job['data'])
-    assert 'output' in job_data
-    assert 'source_component_id' in job_data
-    assert 'keep_original' in job_data and job_data['keep_original'] is False
-    assert len(job_data['output'])
-    assert 'component_id' in job_data['output'][0]
-    assert 'format' in job_data['output'][0]
+    job_data = json.loads(job["data"])
+    assert "output" in job_data
+    assert "source_component_id" in job_data
+    assert "keep_original" in job_data and job_data["keep_original"] is False
+    assert len(job_data["output"])
+    assert "component_id" in job_data["output"][0]
+    assert "format" in job_data["output"][0]


@pytest.mark.xfail(
    raises=ftrack_api.exception.ServerError,
-    reason='Testing environment does not support encoding'
+    reason="Testing environment does not support encoding",
)
def test_encode_media_from_component(session, video_path):
-    '''Encode media based on a component.'''
+    """Encode media based on a component."""
    location = session.query('Location where name is "ftrack.server"').one()
-    component = session.create_component(
-        video_path,
-        location=location
-    )
+    component = session.create_component(video_path, location=location)
    session.commit()

    job = session.encode_media(component)
-    assert job.entity_type == 'Job'
+    assert job.entity_type == "Job"

-    job_data = json.loads(job['data'])
-    assert 'keep_original' in job_data and job_data['keep_original'] is True
+    job_data = json.loads(job["data"])
+    assert "keep_original" in job_data and job_data["keep_original"] is True


def test_create_sequence_component_with_size(session, temporary_sequence):
-    '''Create a sequence component and verify that is has a size.'''
+    """Create a sequence component and verify that it has a size."""
    location = session.query('Location where name is "ftrack.server"').one()

-    component = session.create_component(
-        temporary_sequence
-    )
+    component = session.create_component(temporary_sequence)

-    assert component['size'] > 0
+    assert component["size"] > 0


def test_plugin_arguments(mocker):
-    '''Pass plugin arguments to plugin discovery mechanism.'''
-    mock = mocker.patch(
-        'ftrack_api.plugin.discover'
-    )
-    session = ftrack_api.Session(
-        plugin_paths=[], plugin_arguments={"test": "value"}
-    )
+    """Pass plugin arguments to plugin discovery mechanism."""
+    mock = mocker.patch("ftrack_api.plugin.discover")
+    session = ftrack_api.Session(plugin_paths=[], plugin_arguments={"test": "value"})

    assert mock.called
    mock.assert_called_once_with([], [session], {"test": "value"})


def test_remote_reset(session, new_user):
-    '''Reset user api key.'''
-    key_1 = session.reset_remote(
-        'api_key', entity=new_user
-    )
+    """Reset user api key."""
+    key_1 = session.reset_remote("api_key", entity=new_user)

-    key_2 = session.reset_remote(
-        'api_key', entity=new_user
-    )
+    key_2 = session.reset_remote("api_key", entity=new_user)

    assert key_1 != key_2


-@pytest.mark.parametrize('attribute', [
-    pytest.param(('id',), id='Fail resetting primary key'),
-    pytest.param(('email',), id='Fail resetting attribute without default value')
-
-])
+@pytest.mark.parametrize(
+    "attribute",
+    [
+        pytest.param(("id",), id="Fail resetting primary key"),
+        pytest.param(("email",), id="Fail resetting attribute without default value"),
+    ],
+)
def test_fail_remote_reset(session, user, attribute):
-    '''Fail trying to rest invalid attributes.'''
+    """Fail trying to reset invalid attributes."""
    with pytest.raises(ftrack_api.exception.ServerError):
-        session.reset_remote(
-            attribute, user
-        )
+        session.reset_remote(attribute, user)


def test_close(session):
-    '''Close session.'''
+    """Close session."""
    assert session.closed is False

    session.close()

    assert session.closed is True


def test_close_already_closed_session(session):
-    '''Close session that is already closed.'''
+    """Close session that is already closed."""
    session.close()
    assert session.closed is True

    session.close()
@@ -1259,16 +1216,16 @@ def test_close_already_closed_session(session):


def test_server_call_after_close(session):
-    '''Fail to issue calls to server after session closed.'''
+    """Fail to issue calls to server after session closed."""
    session.close()

    assert session.closed is True

    with pytest.raises(ftrack_api.exception.ConnectionClosedError):
-        session.query('User').first()
+        session.query("User").first()


def test_context_manager(session):
-    '''Use session as context manager.'''
+    """Use session as context manager."""
    with session:
        assert session.closed is False

@@ -1276,17 +1233,15 @@ def test_context_manager(session):


def test_query_nested_custom_attributes(session, new_asset_version):
-    '''Query custom attributes nested and update a value and query again.
+    """Query custom attributes nested and update a value and query again.

    This test will query custom attributes via 2 relations, then update
    the value in one API session and read it back in another to verify
    that it gets the new value.

-    '''
+    """
    session_one = session
-    session_two = ftrack_api.Session(
-        auto_connect_event_hub=False
-    )
+    session_two = ftrack_api.Session(auto_connect_event_hub=False)

    # Read the version via a relation in both sessions.
def get_versions(sessions): @@ -1294,12 +1249,12 @@ def get_versions(sessions): for _session in sessions: asset = _session.query( 'select versions.custom_attributes from Asset where id is "{0}"'.format( - new_asset_version.get('asset_id') + new_asset_version.get("asset_id") ) ).first() - for version in asset['versions']: - if version.get('id') == new_asset_version.get('id'): + for version in asset["versions"]: + if version.get("id") == new_asset_version.get("id"): versions.append(version) return versions @@ -1309,48 +1264,43 @@ def get_versions(sessions): # Read attribute for both sessions. for version in versions: - version['custom_attributes']['versiontest'] + version["custom_attributes"]["versiontest"] # Set attribute on session_one. - versions[0]['custom_attributes']['versiontest'] = random.randint( - 0, 99999 - ) + versions[0]["custom_attributes"]["versiontest"] = random.randint(0, 99999) session.commit() # Read version from server for session_two. - session_two_version = get_versions((session_two, ))[0] + session_two_version = get_versions((session_two,))[0] # Verify that value in session 2 is the same as set and committed in # session 1. assert ( - session_two_version['custom_attributes']['versiontest'] == - versions[0]['custom_attributes']['versiontest'] + session_two_version["custom_attributes"]["versiontest"] + == versions[0]["custom_attributes"]["versiontest"] ) def test_query_nested(session, new_asset_version_with_component): - '''Query components nested and update a value and query again. + """Query components nested and update a value and query again. This test will query components via 2 relations, then update the value in one API session and read it back in another to verify that it gets the new value. - ''' + """ session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) + session_two = ftrack_api.Session(auto_connect_event_hub=False) - query = ( - 'select versions.components.name from Asset where id is ' - '{0}'.format(new_asset_version_with_component['asset_id']) + query = "select versions.components.name from Asset where id is " "{0}".format( + new_asset_version_with_component["asset_id"] ) def get_version(session): - '''Return the test version from *session*.''' + """Return the test version from *session*.""" asset = session.query(query).first() - return asset['versions'][0] + return asset["versions"][0] asset_version = get_version(session_one) asset_version2 = get_version(session_two) @@ -1358,71 +1308,75 @@ def get_version(session): # This assert is not needed, but reading the collections are to ensure they # are inflated. 
    assert (
-        asset_version2['components'][0]['name'] ==
-        asset_version['components'][0]['name']
+        asset_version2["components"][0]["name"]
+        == asset_version["components"][0]["name"]
    )

-    asset_version['components'][0]['name'] = str(uuid.uuid4())
+    asset_version["components"][0]["name"] = str(uuid.uuid4())

    session.commit()

    asset_version2 = get_version(session_two)

    assert (
-        asset_version['components'][0]['name'] ==
-        asset_version2['components'][0]['name']
+        asset_version["components"][0]["name"]
+        == asset_version2["components"][0]["name"]
    )


def test_merge_iterations(session, mocker, project):
-    '''Ensure merge does not happen to many times when querying.'''
-    mocker.spy(session, '_merge')
+    """Ensure merge does not happen too many times when querying."""
+    mocker.spy(session, "_merge")

    session.query(
-        'select status from Task where project_id is {} limit 10'.format(
-            project['id']
-        )
+        "select status from Task where project_id is {} limit 10".format(project["id"])
    ).all()

    assert session._merge.call_count < 75


@pytest.mark.parametrize(
-    'get_versions',
+    "get_versions",
    [
-        pytest.param(lambda component, asset_version, asset: component['version']['asset']['versions'], id='from_component'),
-        pytest.param(lambda component, asset_version, asset: asset_version['asset']['versions'], id='from_asset_version'),
-        pytest.param(lambda component, asset_version, asset: asset['versions'], id='from_asset')
-    ]
+        pytest.param(
+            lambda component, asset_version, asset: component["version"]["asset"][
+                "versions"
+            ],
+            id="from_component",
+        ),
+        pytest.param(
+            lambda component, asset_version, asset: asset_version["asset"]["versions"],
+            id="from_asset_version",
+        ),
+        pytest.param(
+            lambda component, asset_version, asset: asset["versions"], id="from_asset"
+        ),
+    ],
)
def test_query_nested2(session, new_asset_version_with_component, get_versions):
-    '''Query version.asset.versions from component and then add new version.
+    """Query version.asset.versions from component and then add a new version.

    This test will query versions via multiple relations and ensure a new
    version appears when added to a different session and then is queried
    again.

-    '''
+    """
    session_one = session
-    session_two = ftrack_api.Session(
-        auto_connect_event_hub=False
-    )
+    session_two = ftrack_api.Session(auto_connect_event_hub=False)

    # Get a component that is linked to a version and asset.
-    component_id = new_asset_version_with_component['components'][0]['id']
-    query = (
-        'select version.asset.versions from Component where id is "{}"'.format(component_id)
+    component_id = new_asset_version_with_component["components"][0]["id"]
+    query = 'select version.asset.versions from Component where id is "{}"'.format(
+        component_id
    )

    component = session_one.query(query).one()

-    asset_version = component['version']
-    asset = component['version']['asset']
-    versions = component['version']['asset']['versions']
+    asset_version = component["version"]
+    asset = component["version"]["asset"]
+    versions = component["version"]["asset"]["versions"]

    length = len(versions)

-    session_two.create('AssetVersion', {
-        'asset_id': asset['id']
-    })
+    session_two.create("AssetVersion", {"asset_id": asset["id"]})

    session_two.commit()
@@ -1434,22 +1388,22 @@ def test_query_nested2(session, new_asset_version_with_component, get_versions):


def test_session_ready_reset_events(mocker):
-    '''Session ready and reset events.'''
+    """Session ready and reset events."""
    plugin_path = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin')
+        os.path.join(os.path.dirname(__file__), "..", "fixture", "plugin")
    )
    session = ftrack_api.Session(plugin_paths=[plugin_path])

-    assert session._test_called_events['ftrack.api.session.ready'] is 1
-    assert session._test_called_events['ftrack.api.session.reset'] is 0
+    assert session._test_called_events["ftrack.api.session.ready"] == 1
+    assert session._test_called_events["ftrack.api.session.reset"] == 0

    session.reset()

-    assert session._test_called_events['ftrack.api.session.ready'] is 1
-    assert session._test_called_events['ftrack.api.session.reset'] is 1
+    assert session._test_called_events["ftrack.api.session.ready"] == 1
+    assert session._test_called_events["ftrack.api.session.reset"] == 1


def test_entity_reference(mocker, session):
-    '''Return entity reference that uniquely identifies entity.'''
+    """Return entity reference that uniquely identifies entity."""
    mock_entity = mocker.Mock(entity_type="MockEntityType")
    mock_auto_populating = mocker.patch.object(session, "auto_populating")
    mock_primary_key = mocker.patch(
@@ -1458,105 +1412,79 @@
    reference = session.entity_reference(mock_entity)

-    assert reference == {
-        "__entity_type__": "MockEntityType",
-        "id": "mock-id"
-    }
+    assert reference == {"__entity_type__": "MockEntityType", "id": "mock-id"}

    mock_auto_populating.assert_called_once_with(False)
    mock_primary_key.assert_called_once_with(mock_entity)


def test_auto_populate_is_thread_dependent(session, propagating_thread):
-    '''Make sure auto_populate is configured per thread'''
-    auto_populate_state = (
-        session.auto_populate
-    )
+    """Make sure auto_populate is configured per thread."""
+    auto_populate_state = session.auto_populate

    def _assert_auto_populate():
-        assert (
-            session.auto_populate == auto_populate_state
-        )
+        assert session.auto_populate == auto_populate_state

-        task = session.query(
-            u'Task'
-        ).first()
+        task = session.query("Task").first()

        for attribute in task.attributes:
-            assert (
-                task.get(attribute.name) is not ftrack_api.symbol.NOT_SET
-            )
+            assert task.get(attribute.name) is not ftrack_api.symbol.NOT_SET

    with session.auto_populating(not auto_populate_state):
-        t = propagating_thread(
-            target=_assert_auto_populate
-        )
+        t = propagating_thread(target=_assert_auto_populate)
        t.start()
        t.join()


def test_operation_recoding_thread_dependent(session, propagating_thread):
-    '''Make sure operation recording is thread dependent.'''
+    """Make sure operation recording is thread dependent."""
    _id = str(uuid.uuid4())
-    _entity_type = 'User'
+    _entity_type = "User"

    with session.operation_recording(False):
        # Create entity in separate thread, should be recorded
        # in the session.
-        t = propagating_thread(
-            target=lambda: session.create(
-                _entity_type, {'id': _id}
-            )
-        )
+        t = propagating_thread(target=lambda: session.create(_entity_type, {"id": _id}))
        t.start()
        t.join()

        # Create entity that should be thrown away.
-        session.create(
-            _entity_type
-        )
+        session.create(_entity_type)

-    assert (
-        len(session.recorded_operations) == 1
-    )
+    assert len(session.recorded_operations) == 1

    for operation in session.recorded_operations:
-        assert isinstance(
-            operation, ftrack_api.operation.CreateEntityOperation
-        )
+        assert isinstance(operation, ftrack_api.operation.CreateEntityOperation)

-        assert operation.entity_type == 'User'
-        assert operation.entity_key['id'] == _id
+        assert operation.entity_type == "User"
+        assert operation.entity_key["id"] == _id


def test_strict_api_header():
-    '''Create ftrack session containing ftrack-strict-api = True header.'''
+    """Create ftrack session containing ftrack-strict-api = True header."""
    new_session = ftrack_api.Session(strict_api=True)
-
-    assert(
-        'ftrack-strict-api' in new_session._request.headers.keys(),
-        new_session._request.headers['ftrack-strict-api'] == 'true'
+
+    assert (
+        "ftrack-strict-api" in new_session._request.headers
+        and new_session._request.headers["ftrack-strict-api"] == "true"
    )


def test_custom_cookies_session():
-    '''Create ftrack session containing custom cookies.'''
-    new_session = ftrack_api.Session(cookies={'abc': 'def'})
+    """Create ftrack session containing custom cookies."""
+    new_session = ftrack_api.Session(cookies={"abc": "def"})

    cookies_dict = requests.utils.dict_from_cookiejar(new_session._request.cookies)

-    assert(
-        'abc' in cookies_dict.keys(),
-        cookies_dict['abc'] == 'def'
-    )
+    assert "abc" in cookies_dict and cookies_dict["abc"] == "def"


def test_custom_headers_session():
-    '''Create ftrack session containing custom headers.'''
-    new_session = ftrack_api.Session(headers={'abc': 'def'})
+    """Create ftrack session containing custom headers."""
+    new_session = ftrack_api.Session(headers={"abc": "def"})

-    assert(
-        'abc' in new_session._request.headers.keys(),
-        new_session._request.headers['abc'] == 'def'
-    )
+    assert (
+        "abc" in new_session._request.headers
+        and new_session._request.headers["abc"] == "def"
+    )
diff --git a/test/unit/test_timer.py b/test/unit/test_timer.py
index cf8b014e..43e2df93 100644
--- a/test/unit/test_timer.py
+++ b/test/unit/test_timer.py
@@ -6,17 +6,13 @@


def test_manually_create_multiple_timers_with_error(session, new_user):
-    '''Fail to create a second timer.'''
-    session.create('Timer', {
-        'user': new_user
-    })
+    """Fail to create a second timer."""
+    session.create("Timer", {"user": new_user})

    session.commit()

    with pytest.raises(ftrack_api.exception.ServerError):
-        session.create('Timer', {
-            'user': new_user
-        })
+        session.create("Timer", {"user": new_user})

        session.commit()
@@ -24,7 +20,7 @@ def test_manually_create_multiple_timers_with_error(session, new_user):


def test_create_multiple_timers_with_error(session, new_user):
-    '''Fail to create a second timer.'''
+    """Fail to create a second timer."""
    new_user.start_timer()

    with pytest.raises(ftrack_api.exception.NotUniqueError):
@@ -34,21 +30,21 @@ def
test_create_multiple_timers_with_error(session, new_user): def test_start_and_stop_a_timer(session, new_user, new_task): - '''Start a new timer and stop it to create a timelog.''' + """Start a new timer and stop it to create a timelog.""" new_user.start_timer(new_task) new_user.stop_timer() timelog = session.query( - 'Timelog where context_id = "{0}"'.format(new_task['id']) + 'Timelog where context_id = "{0}"'.format(new_task["id"]) ).one() - assert timelog['user_id'] == new_user['id'], 'User id is correct.' - assert timelog['context_id'] == new_task['id'], 'Task id is correct.' + assert timelog["user_id"] == new_user["id"], "User id is correct." + assert timelog["context_id"] == new_task["id"], "Task id is correct." def test_start_a_timer_when_timer_is_running(session, new_user, new_task): - '''Start a timer when an existing timer is already running.''' + """Start a timer when an existing timer is already running.""" new_user.start_timer(new_task) # Create the second timer without context. @@ -56,19 +52,17 @@ def test_start_a_timer_when_timer_is_running(session, new_user, new_task): # There should be only one existing timelog for this user. timelogs = session.query( - 'Timelog where user_id = "{0}"'.format(new_user['id']) + 'Timelog where user_id = "{0}"'.format(new_user["id"]) ).all() - assert len(timelogs) == 1, 'One timelog exists.' + assert len(timelogs) == 1, "One timelog exists." - timelog = session.query( - 'Timer where user_id = "{0}"'.format(new_user['id']) - ).one() + timelog = session.query('Timer where user_id = "{0}"'.format(new_user["id"])).one() # Make sure running timer has no context. - assert timelog['context_id'] is None, 'Timer does not have a context.' + assert timelog["context_id"] is None, "Timer does not have a context." def test_stop_timer_without_timer_running(session, new_user): - '''Stop a timer when no timer is running.''' + """Stop a timer when no timer is running.""" with pytest.raises(ftrack_api.exception.NoResultFoundError): new_user.stop_timer()
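
Note on the strict-api header and cookie tests above: handing `assert` a parenthesised, comma-separated pair of conditions tests a two-element tuple, and a non-empty tuple is always truthy, so such an assertion can never fail (CPython flags this with an "assertion is always true" SyntaxWarning). Below is a minimal, self-contained sketch of the pitfall and the corrected form; the names are illustrative only and not taken from the test suite.

# tuple_assert_sketch.py -- why ``assert (a, b)`` always passes.


def main():
    headers = {}  # Deliberately empty so both conditions below are False.

    # Always passes: the comma builds a two-element tuple and assert only
    # checks that the tuple is truthy, which any non-empty tuple is.
    assert (
        "ftrack-strict-api" in headers,
        headers.get("ftrack-strict-api") == "true",
    )

    # Corrected form: ``and`` combines the conditions, so each one is
    # actually evaluated and the assert fails on the empty mapping.
    try:
        assert (
            "ftrack-strict-api" in headers
            and headers.get("ftrack-strict-api") == "true"
        )
    except AssertionError:
        print("combined assert failed as expected")


if __name__ == "__main__":
    main()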