diff --git a/celeryconfig.py b/celeryconfig.py
index c5817d17..07b8e218 100644
--- a/celeryconfig.py
+++ b/celeryconfig.py
@@ -1,7 +1,7 @@
from celery import Celery
from django.conf import settings
-app = Celery('event_routing_backends')
+app = Celery("event_routing_backends")
app.config_from_object(settings)
diff --git a/docs/conf.py b/docs/conf.py
index 6d6f957d..244b73e6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -32,13 +32,13 @@ def get_version(*file_paths):
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
- raise RuntimeError('Unable to find version string.')
+ raise RuntimeError("Unable to find version string.")
REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(REPO_ROOT)
-VERSION = get_version('../event_routing_backends', '__init__.py')
+VERSION = get_version("../event_routing_backends", "__init__.py")
# Configure Django for autodoc usage
settings.configure()
@@ -62,40 +62,40 @@ def get_version(*file_paths):
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.doctest',
- 'sphinx.ext.intersphinx',
- 'sphinx.ext.ifconfig',
- 'sphinx.ext.napoleon',
- 'sphinx.ext.autosectionlabel'
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.ifconfig",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.autosectionlabel",
]
# A list of warning types to suppress arbitrary warning messages.
suppress_warnings = [
- 'image.nonlocal_uri',
+ "image.nonlocal_uri",
]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The top level toctree document.
-top_level_doc = 'index'
+top_level_doc = "index"
# General information about the project.
-project = 'event-routing-backends'
-copyright = f'{datetime.now().year}, Axim Collaborative, Inc' # pylint: disable=redefined-builtin
-author = 'Axim Collaborative, Inc'
-project_title = 'event-routing-backends'
+project = "event-routing-backends"
+copyright = f"{datetime.now().year}, Axim Collaborative, Inc" # pylint: disable=redefined-builtin
+author = "Axim Collaborative, Inc"
+project_title = "event-routing-backends"
documentation_title = f"{project_title}"
# The version info for the project you're documenting, acts as replacement for
@@ -126,7 +126,7 @@ def get_version(*file_paths):
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'changelog.rst']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "changelog.rst"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -148,7 +148,7 @@ def get_version(*file_paths):
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -165,22 +165,22 @@ def get_version(*file_paths):
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'sphinx_book_theme'
+html_theme = "sphinx_book_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
- "repository_url": "https://github.com/openedx/event-routing-backends",
- "repository_branch": "master",
- "path_to_docs": "docs/",
- "home_page_in_toc": True,
- "use_repository_button": True,
- "use_issues_button": True,
- "use_edit_page_button": True,
- # Please don't change unless you know what you're doing.
- "extra_footer": """
+ "repository_url": "https://github.com/openedx/event-routing-backends",
+ "repository_branch": "master",
+ "path_to_docs": "docs/",
+ "home_page_in_toc": True,
+ "use_repository_button": True,
+ "use_issues_button": True,
+ "use_edit_page_button": True,
+ # Please don't change unless you know what you're doing.
+ "extra_footer": """
Creative Commons Attribution-ShareAlike 4.0 International License.
- """
+ """,
}
# Add any paths that contain custom themes here, relative to this directory.
@@ -229,10 +229,10 @@ def get_version(*file_paths):
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
html_css_files = [
- 'theme_overrides.css',
+ "theme_overrides.css",
]
# Add any extra paths that contain custom files (such as robots.txt or
@@ -313,7 +313,7 @@ def get_version(*file_paths):
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
-htmlhelp_basename = f'{project}doc'
+htmlhelp_basename = f"{project}doc"
# -- Options for LaTeX output ---------------------------------------------
@@ -321,15 +321,12 @@ def get_version(*file_paths):
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
-
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
-
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
-
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@@ -338,10 +335,9 @@ def get_version(*file_paths):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
-latex_target = f'{project}.tex'
+latex_target = f"{project}.tex"
latex_documents = [
- (top_level_doc, latex_target, documentation_title,
- author, 'manual'),
+ (top_level_doc, latex_target, documentation_title, author, "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -381,10 +377,7 @@ def get_version(*file_paths):
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
- (top_level_doc, project_title, documentation_title,
- [author], 1)
-]
+man_pages = [(top_level_doc, project_title, documentation_title, [author], 1)]
# If true, show URL addresses after external links.
#
@@ -397,9 +390,15 @@ def get_version(*file_paths):
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (top_level_doc, project_title, documentation_title,
- author, project_title, 'Various backends for receiving edX LMS events.',
- 'Miscellaneous'),
+ (
+ top_level_doc,
+ project_title,
+ documentation_title,
+ author,
+ project_title,
+ "Various backends for receiving edX LMS events.",
+ "Miscellaneous",
+ ),
]
# Documents to append as an appendix to all manuals.
@@ -473,7 +472,7 @@ def get_version(*file_paths):
# epub_post_files = []
# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
+epub_exclude_files = ["search.html"]
# The depth of the table of contents in toc.ncx.
#
@@ -506,9 +505,12 @@ def get_version(*file_paths):
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- 'python': ('https://docs.python.org/3.6', None),
- 'django': ('https://docs.djangoproject.com/en/1.11/', 'https://docs.djangoproject.com/en/1.11/_objects/'),
- 'model_utils': ('https://django-model-utils.readthedocs.io/en/latest/', None),
+ "python": ("https://docs.python.org/3.6", None),
+ "django": (
+ "https://docs.djangoproject.com/en/1.11/",
+ "https://docs.djangoproject.com/en/1.11/_objects/",
+ ),
+ "model_utils": ("https://django-model-utils.readthedocs.io/en/latest/", None),
}
@@ -520,17 +522,24 @@ def on_init(app): # pylint: disable=unused-argument
avoid checking in the generated reStructuredText files.
"""
docs_path = os.path.abspath(os.path.dirname(__file__))
- root_path = os.path.abspath(os.path.join(docs_path, '..'))
- apidoc_path = 'sphinx-apidoc'
- if hasattr(sys, 'real_prefix'): # Check to see if we are in a virtualenv
+ root_path = os.path.abspath(os.path.join(docs_path, ".."))
+ apidoc_path = "sphinx-apidoc"
+ if hasattr(sys, "real_prefix"): # Check to see if we are in a virtualenv
# If we are, assemble the path manually
- bin_path = os.path.abspath(os.path.join(sys.prefix, 'bin'))
+ bin_path = os.path.abspath(os.path.join(sys.prefix, "bin"))
apidoc_path = os.path.join(bin_path, apidoc_path)
- check_call([apidoc_path, '-o', docs_path, os.path.join(root_path, 'event_routing_backends'),
- os.path.join(root_path, 'event_routing_backends/migrations')])
+ check_call(
+ [
+ apidoc_path,
+ "-o",
+ docs_path,
+ os.path.join(root_path, "event_routing_backends"),
+ os.path.join(root_path, "event_routing_backends/migrations"),
+ ]
+ )
def setup(app):
"""Sphinx extension: run sphinx-apidoc."""
- event = 'builder-inited'
+ event = "builder-inited"
app.connect(event, on_init)
diff --git a/event_routing_backends/__init__.py b/event_routing_backends/__init__.py
index 3fd57025..3fbb3ea3 100644
--- a/event_routing_backends/__init__.py
+++ b/event_routing_backends/__init__.py
@@ -2,4 +2,4 @@
Various backends for receiving edX LMS events..
"""
-__version__ = '9.3.1'
+__version__ = "9.3.1"
diff --git a/event_routing_backends/admin.py b/event_routing_backends/admin.py
index 5695fcd5..4fcf8a3d 100644
--- a/event_routing_backends/admin.py
+++ b/event_routing_backends/admin.py
@@ -1,6 +1,7 @@
"""
Contains Admin class(es) for the django app.
"""
+
from config_models.admin import KeyedConfigurationModelAdmin
from django.contrib import admin
@@ -13,11 +14,11 @@ class RouterConfigurationAdmin(KeyedConfigurationModelAdmin):
Admin model class for RouterConfiguration model.
"""
- history_list_display = 'status'
- change_form_template = 'admin/router_conf_change_form.html'
+ history_list_display = "status"
+ change_form_template = "admin/router_conf_change_form.html"
def get_displayable_field_names(self):
"""
Get the list display.
"""
- return ['backend_name', 'enabled', 'route_url', 'configurations']
+ return ["backend_name", "enabled", "route_url", "configurations"]
diff --git a/event_routing_backends/apps.py b/event_routing_backends/apps.py
index f8cd99d4..238dd3d2 100644
--- a/event_routing_backends/apps.py
+++ b/event_routing_backends/apps.py
@@ -11,21 +11,21 @@ class EventRoutingBackendsConfig(AppConfig):
Configuration for the event_routing_backends Django application.
"""
- name = 'event_routing_backends'
+ name = "event_routing_backends"
verbose_name = "Event Routing Backends"
plugin_app = {
PluginSettings.CONFIG: {
- 'lms.djangoapp': {
- 'production': {PluginSettings.RELATIVE_PATH: 'settings.production'},
- 'common': {PluginSettings.RELATIVE_PATH: 'settings.common'},
- 'devstack': {PluginSettings.RELATIVE_PATH: 'settings.devstack'},
+ "lms.djangoapp": {
+ "production": {PluginSettings.RELATIVE_PATH: "settings.production"},
+ "common": {PluginSettings.RELATIVE_PATH: "settings.common"},
+ "devstack": {PluginSettings.RELATIVE_PATH: "settings.devstack"},
+ },
+ "cms.djangoapp": {
+ "production": {PluginSettings.RELATIVE_PATH: "settings.production"},
+ "common": {PluginSettings.RELATIVE_PATH: "settings.common"},
+ "devstack": {PluginSettings.RELATIVE_PATH: "settings.devstack"},
},
- 'cms.djangoapp': {
- 'production': {PluginSettings.RELATIVE_PATH: 'settings.production'},
- 'common': {PluginSettings.RELATIVE_PATH: 'settings.common'},
- 'devstack': {PluginSettings.RELATIVE_PATH: 'settings.devstack'},
- }
}
}
diff --git a/event_routing_backends/backends/async_events_router.py b/event_routing_backends/backends/async_events_router.py
index c25e938d..2e46e451 100644
--- a/event_routing_backends/backends/async_events_router.py
+++ b/event_routing_backends/backends/async_events_router.py
@@ -4,6 +4,7 @@
This events router will trigger a celery task to send the events to the
configured hosts.
"""
+
from event_routing_backends.backends.events_router import EventsRouter
from event_routing_backends.tasks import dispatch_bulk_events, dispatch_event, dispatch_event_persistent
diff --git a/event_routing_backends/backends/events_router.py b/event_routing_backends/backends/events_router.py
index 6d6965b9..23b1a053 100644
--- a/event_routing_backends/backends/events_router.py
+++ b/event_routing_backends/backends/events_router.py
@@ -1,6 +1,7 @@
"""
Generic router to send events to hosts.
"""
+
import json
import logging
from datetime import datetime, timedelta
@@ -15,9 +16,9 @@
logger = logging.getLogger(__name__)
-EVENTS_ROUTER_QUEUE_FORMAT = 'events_router_queue_{}'
-EVENTS_ROUTER_DEAD_QUEUE_FORMAT = 'dead_queue_{}'
-EVENTS_ROUTER_LAST_SENT_FORMAT = 'last_sent_{}'
+EVENTS_ROUTER_QUEUE_FORMAT = "events_router_queue_{}"
+EVENTS_ROUTER_DEAD_QUEUE_FORMAT = "dead_queue_{}"
+EVENTS_ROUTER_LAST_SENT_FORMAT = "last_sent_{}"
class EventsRouter:
@@ -43,24 +44,24 @@ def configure_host(self, host, router):
"""
Create host_configurations for the given host and router.
"""
- host['host_configurations'] = {}
- host['host_configurations'].update({'url': router.route_url})
- host['host_configurations'].update({'auth_scheme': router.auth_scheme})
+ host["host_configurations"] = {}
+ host["host_configurations"].update({"url": router.route_url})
+ host["host_configurations"].update({"auth_scheme": router.auth_scheme})
if router.auth_scheme == RouterConfiguration.AUTH_BASIC:
- host['host_configurations'].update({'username': router.username})
- host['host_configurations'].update({'password': router.password})
+ host["host_configurations"].update({"username": router.username})
+ host["host_configurations"].update({"password": router.password})
elif router.auth_scheme == RouterConfiguration.AUTH_BEARER:
- host['host_configurations'].update({'auth_key': router.auth_key})
+ host["host_configurations"].update({"auth_key": router.auth_key})
if router.backend_name == RouterConfiguration.CALIPER_BACKEND:
- host.update({'router_type': 'AUTH_HEADERS'})
- if 'headers' in host:
- host['host_configurations'].update({'headers': host['headers']})
+ host.update({"router_type": "AUTH_HEADERS"})
+ if "headers" in host:
+ host["host_configurations"].update({"headers": host["headers"]})
elif router.backend_name == RouterConfiguration.XAPI_BACKEND:
- host.update({'router_type': 'XAPI_LRS'})
+ host.update({"router_type": "XAPI_LRS"})
else:
- host.update({'router_type': 'INVALID_TYPE'})
+ host.update({"router_type": "INVALID_TYPE"})
return host
@@ -76,14 +77,17 @@ def prepare_to_send(self, events):
# If operators do not wish to log and have no enabled routers they should set XAPI_EVENTS_ENABLED
# or CALIPER_EVENTS_ENABLED to false.
if not routers:
- logger.debug('Could not find any enabled router configuration for backend %s', self.backend_name)
+ logger.debug(
+ "Could not find any enabled router configuration for backend %s",
+ self.backend_name,
+ )
routers = []
for event in events:
try:
- event_name = event['name']
+ event_name = event["name"]
except TypeError as exc:
- raise ValueError('Expected event as dict but {type} was given.'.format(type=type(event))) from exc
+ raise ValueError("Expected event as dict but {type} was given.".format(type=type(event))) from exc
try:
logger.debug(
@@ -96,7 +100,7 @@ def prepare_to_send(self, events):
'Could not process edx event "%s" for backend %s\'s router',
event_name,
self.backend_name,
- exc_info=True
+ exc_info=True,
)
continue
@@ -104,7 +108,7 @@ def prepare_to_send(self, events):
'Successfully processed edx event "%s" for router with backend %s. Processed events: %s',
event_name,
self.backend_name,
- processed_events
+ processed_events,
)
for router in routers:
@@ -114,7 +118,9 @@ def prepare_to_send(self, events):
if not host:
logger.info(
'Event %s is not allowed to be sent to any host for router ID %s with backend "%s"',
- event_name, router_pk, self.backend_name
+ event_name,
+ router_pk,
+ self.backend_name,
)
else:
host = self.configure_host(host, router)
@@ -137,7 +143,7 @@ def get_failed_events(self, batch_size):
failed_events = redis.rpop(self.dead_queue, batch_size)
if not failed_events:
return []
- return [json.loads(event.decode('utf-8')) for event in failed_events]
+ return [json.loads(event.decode("utf-8")) for event in failed_events]
def bulk_send(self, events):
"""
@@ -164,11 +170,7 @@ def bulk_send(self, events):
ids.add(updated_event["id"])
if prepared_events: # pragma: no cover
- self.dispatch_bulk_events(
- prepared_events,
- host['router_type'],
- host['host_configurations']
- )
+ self.dispatch_bulk_events(prepared_events, host["router_type"], host["host_configurations"])
def send(self, event):
"""
@@ -189,34 +191,37 @@ def send(self, event):
try:
redis.set(self.last_sent_key, datetime.now().isoformat())
- self.bulk_send([json.loads(queued_event.decode('utf-8')) for queued_event in batch])
+ self.bulk_send([json.loads(queued_event.decode("utf-8")) for queued_event in batch])
except Exception: # pylint: disable=broad-except
logger.exception(
- 'Exception occurred while trying to bulk dispatch {} events.'.format(
- len(batch)
- ),
- exc_info=True
+ "Exception occurred while trying to bulk dispatch {} events.".format(len(batch)),
+ exc_info=True,
)
- logger.info(f'Pushing failed events to the dead queue: {self.dead_queue}')
+ logger.info(f"Pushing failed events to the dead queue: {self.dead_queue}")
redis.lpush(self.dead_queue, *batch)
return
event_routes = self.prepare_to_send([event])
for events_for_route in event_routes.values():
- for event_name, updated_event, host, is_business_critical in events_for_route:
+ for (
+ event_name,
+ updated_event,
+ host,
+ is_business_critical,
+ ) in events_for_route:
if is_business_critical:
self.dispatch_event_persistent(
event_name,
updated_event,
- host['router_type'],
- host['host_configurations'],
+ host["router_type"],
+ host["host_configurations"],
)
else:
self.dispatch_event(
event_name,
updated_event,
- host['router_type'],
- host['host_configurations'],
+ host["router_type"],
+ host["host_configurations"],
)
def queue_event(self, redis, event):
@@ -236,12 +241,14 @@ def queue_event(self, redis, event):
# Deduplicate list, in some misconfigured cases tracking events can be emitted to the
# bus twice, causing them to be processed twice, which LRSs will reject.
# See: https://github.com/openedx/event-routing-backends/issues/410
- batch = [i for n, i in enumerate(batch) if i not in batch[n + 1:]]
+ batch = [i for n, i in enumerate(batch) if i not in batch[n + 1 :]]
final_size = len(batch)
if final_size != orig_size: # pragma: no cover
- logger.warning(f"{orig_size - final_size} duplicate events in event-routing-backends batch queue! "
- f"This is a likely due to misconfiguration of EVENT_TRACKING_BACKENDS.")
+ logger.warning(
+ f"{orig_size - final_size} duplicate events in event-routing-backends batch queue! "
+                f"This is likely due to misconfiguration of EVENT_TRACKING_BACKENDS."
+ )
return batch
return None
@@ -253,7 +260,7 @@ def time_to_send(self, redis):
last_sent = redis.get(self.last_sent_key)
if not last_sent:
return True
- time_passed = (datetime.now() - datetime.fromisoformat(last_sent.decode('utf-8')))
+ time_passed = datetime.now() - datetime.fromisoformat(last_sent.decode("utf-8"))
ready = time_passed > timedelta(seconds=settings.EVENT_ROUTING_BACKEND_BATCH_INTERVAL)
return ready
@@ -291,13 +298,14 @@ def overwrite_event_data(self, event, host, event_name):
Returns:
dict
"""
- if 'override_args' in host and isinstance(event, dict):
+ if "override_args" in host and isinstance(event, dict):
event = event.copy()
- event.update(host['override_args'])
- logger.debug('Overwriting processed version of edx event "{}" with values {}'.format(
- event_name,
- host['override_args']
- ))
+ event.update(host["override_args"])
+ logger.debug(
+ 'Overwriting processed version of edx event "{}" with values {}'.format(
+ event_name, host["override_args"]
+ )
+ )
return event
def dispatch_event(self, event_name, updated_event, router_type, host_configurations):
@@ -310,7 +318,7 @@ def dispatch_event(self, event_name, updated_event, router_type, host_configurat
router_type (str): type of the router
host_configurations (dict): host configurations dict
"""
- raise NotImplementedError('dispatch_event is not implemented')
+ raise NotImplementedError("dispatch_event is not implemented")
def dispatch_bulk_events(self, events, router_type, host_configurations):
"""
@@ -321,7 +329,7 @@ def dispatch_bulk_events(self, events, router_type, host_configurations):
router_type (str): type of the router
host_configurations (dict): host configurations dict
"""
- raise NotImplementedError('dispatch_bulk_events is not implemented')
+ raise NotImplementedError("dispatch_bulk_events is not implemented")
def dispatch_event_persistent(self, event_name, updated_event, router_type, host_configurations):
"""
@@ -333,4 +341,4 @@ def dispatch_event_persistent(self, event_name, updated_event, router_type, host
router_type (str): type of the router
host_configurations (dict): host configurations dict
"""
- raise NotImplementedError('dispatch_event_persistent is not implemented')
+ raise NotImplementedError("dispatch_event_persistent is not implemented")
diff --git a/event_routing_backends/backends/sync_events_router.py b/event_routing_backends/backends/sync_events_router.py
index 8fe7f68d..5ba0a79c 100644
--- a/event_routing_backends/backends/sync_events_router.py
+++ b/event_routing_backends/backends/sync_events_router.py
@@ -5,6 +5,7 @@
can be configured to use this router to send events to hosts
in the same thread as it process the events.
"""
+
from event_routing_backends.backends.events_router import EventsRouter
from event_routing_backends.tasks import bulk_send_events, send_event
diff --git a/event_routing_backends/backends/tests/test_events_router.py b/event_routing_backends/backends/tests/test_events_router.py
index 511400ff..030f2e0b 100644
--- a/event_routing_backends/backends/tests/test_events_router.py
+++ b/event_routing_backends/backends/tests/test_events_router.py
@@ -1,6 +1,7 @@
"""
Test the EventsRouter
"""
+
import datetime
import json
from copy import copy
@@ -26,33 +27,18 @@
ROUTER_CONFIG_FIXTURE = [
{
- 'headers': {},
- 'match_params': {
- 'data.key': 'value'
- },
- 'override_args': {
- 'new_key': 'new_value'
- }
+ "headers": {},
+ "match_params": {"data.key": "value"},
+ "override_args": {"new_key": "new_value"},
},
+ {"match_params": {"data.key": "value1"}, "override_args": {"new_key": "new_value"}},
+ {"match_params": {"data.key": "value"}},
{
- 'match_params': {
- 'data.key': 'value1'
+ "match_params": {},
+ "host_configurations": {
+ "version": "1.0.1",
},
- 'override_args': {
- 'new_key': 'new_value'
- }
- },
- {
- 'match_params': {
- 'data.key': 'value'
- }
},
- {
- 'match_params': {},
- 'host_configurations': {
- 'version': '1.0.1',
- }
- }
]
@@ -65,109 +51,83 @@ class TestEventsRouter(TestCase):
def setUp(self):
super().setUp()
self.sample_event = {
- 'name': str(sentinel.name),
- 'event_type': 'edx.test.event',
- 'time': '2020-01-01T12:12:12.000000+00:00',
- 'data': {
- 'key': 'value'
- },
- 'context': {
- 'username': 'testuser'
- },
- 'session': '0000'
+ "name": str(sentinel.name),
+ "event_type": "edx.test.event",
+ "time": "2020-01-01T12:12:12.000000+00:00",
+ "data": {"key": "value"},
+ "context": {"username": "testuser"},
+ "session": "0000",
}
self.transformed_event = {
- 'id': 'some-random-uuid',
- 'name': str(sentinel.name),
- 'transformed': True,
- 'event_time': '2020-01-01T12:12:12.000000+00:00',
- 'data': {
- 'key': 'value'
- },
+ "id": "some-random-uuid",
+ "name": str(sentinel.name),
+ "transformed": True,
+ "event_time": "2020-01-01T12:12:12.000000+00:00",
+ "data": {"key": "value"},
}
self.bulk_sample_events = [
{
- 'name': str(sentinel.name),
- 'event_type': 'edx.test.event',
- 'time': '2020-01-01T12:12:12.000000+00:00',
- 'data': {
- 'key': 'value'
- },
- 'context': {
- 'username': 'testuser'
- },
- 'session': '0000'
+ "name": str(sentinel.name),
+ "event_type": "edx.test.event",
+ "time": "2020-01-01T12:12:12.000000+00:00",
+ "data": {"key": "value"},
+ "context": {"username": "testuser"},
+ "session": "0000",
},
{
- 'name': str(sentinel.name),
- 'event_type': 'edx.test.event',
- 'time': '2020-01-01T12:12:12.000000+00:01',
- 'data': {
- 'key': 'value 1'
- },
- 'context': {
- 'username': 'testuser1'
- },
- 'session': '0001'
+ "name": str(sentinel.name),
+ "event_type": "edx.test.event",
+ "time": "2020-01-01T12:12:12.000000+00:01",
+ "data": {"key": "value 1"},
+ "context": {"username": "testuser1"},
+ "session": "0001",
},
{
- 'name': str(sentinel.name),
- 'event_type': 'edx.test.event',
- 'time': '2020-01-01T12:12:12.000000+00:02',
- 'data': {
- 'key': 'value 2'
- },
- 'context': {
- 'username': 'testuser2'
- },
- 'session': '0002'
- }
+ "name": str(sentinel.name),
+ "event_type": "edx.test.event",
+ "time": "2020-01-01T12:12:12.000000+00:02",
+ "data": {"key": "value 2"},
+ "context": {"username": "testuser2"},
+ "session": "0002",
+ },
]
self.bulk_transformed_events = [
{
- 'id': 'some-uuid-1',
- 'name': str(sentinel.name),
- 'transformed': True,
- 'event_time': '2020-01-01T12:12:12.000000+00:00',
- 'data': {
- 'key': 'value'
- },
+ "id": "some-uuid-1",
+ "name": str(sentinel.name),
+ "transformed": True,
+ "event_time": "2020-01-01T12:12:12.000000+00:00",
+ "data": {"key": "value"},
},
{
- 'id': 'some-uuid-2',
- 'name': str(sentinel.name),
- 'transformed': True,
- 'event_time': '2020-01-01T12:12:12.000000+00:01',
- 'data': {
- 'key': 'value 1'
- },
+ "id": "some-uuid-2",
+ "name": str(sentinel.name),
+ "transformed": True,
+ "event_time": "2020-01-01T12:12:12.000000+00:01",
+ "data": {"key": "value 1"},
},
{
- 'id': 'some-uuid-3',
- 'name': str(sentinel.name),
- 'transformed': True,
- 'event_time': '2020-01-01T12:12:12.000000+00:02',
- 'data': {
- 'key': 'value 2'
- },
+ "id": "some-uuid-3",
+ "name": str(sentinel.name),
+ "transformed": True,
+ "event_time": "2020-01-01T12:12:12.000000+00:02",
+ "data": {"key": "value 2"},
},
{
- 'id': 'some-uuid-3',
- 'name': str(sentinel.name),
- 'transformed': True,
- 'event_time': '2020-01-01T12:12:12.000000+00:02',
- 'data': {
- 'key': 'value 2'
- },
- }
+ "id": "some-uuid-3",
+ "name": str(sentinel.name),
+ "transformed": True,
+ "event_time": "2020-01-01T12:12:12.000000+00:02",
+ "data": {"key": "value 2"},
+ },
]
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.backends.events_router.logger')
- @patch('event_routing_backends.models.RouterConfiguration.get_enabled_routers')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.backends.events_router.logger")
+ @patch("event_routing_backends.models.RouterConfiguration.get_enabled_routers")
def test_with_processor_exception(self, mocked_get_enabled_routers, mocked_logger, mocked_post):
processors = [
MagicMock(return_value=[self.transformed_event]),
@@ -176,9 +136,9 @@ def test_with_processor_exception(self, mocked_get_enabled_routers, mocked_logge
]
processors[1].side_effect = EventEmissionExit
- mocked_get_enabled_routers.return_value = ['test']
+ mocked_get_enabled_routers.return_value = ["test"]
- router = EventsRouter(processors=processors, backend_name='test')
+ router = EventsRouter(processors=processors, backend_name="test")
router.send(self.transformed_event)
processors[0].assert_called_once_with([self.transformed_event])
@@ -187,37 +147,40 @@ def test_with_processor_exception(self, mocked_get_enabled_routers, mocked_logge
mocked_post.assert_not_called()
- self.assertIn(call(
- 'Could not process edx event "%s" for backend %s\'s router',
- self.transformed_event['name'],
- 'test',
- exc_info=True
- ), mocked_logger.error.mock_calls)
+ self.assertIn(
+ call(
+ 'Could not process edx event "%s" for backend %s\'s router',
+ self.transformed_event["name"],
+ "test",
+ exc_info=True,
+ ),
+ mocked_logger.error.mock_calls,
+ )
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.backends.events_router.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.backends.events_router.logger")
def test_with_no_router_configurations_available(self, mocked_logger, mocked_post):
- router = EventsRouter(processors=[], backend_name='test')
+ router = EventsRouter(processors=[], backend_name="test")
router.send(self.transformed_event)
mocked_post.assert_not_called()
self.assertIn(
- call('Could not find any enabled router configuration for backend %s', 'test'),
- mocked_logger.debug.mock_calls
+ call("Could not find any enabled router configuration for backend %s", "test"),
+ mocked_logger.debug.mock_calls,
)
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.backends.events_router.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.backends.events_router.logger")
def test_with_no_available_hosts(self, mocked_logger, mocked_post):
router_config = RouterConfigurationFactory.create(
- backend_name='test_backend',
+ backend_name="test_backend",
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[1]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[1],
)
- router = EventsRouter(processors=[], backend_name='test_backend')
+ router = EventsRouter(processors=[], backend_name="test_backend")
TieredCache.dangerous_clear_all_tiers()
router.send(self.transformed_event)
@@ -226,17 +189,19 @@ def test_with_no_available_hosts(self, mocked_logger, mocked_post):
self.assertIn(
call(
'Event %s is not allowed to be sent to any host for router ID %s with backend "%s"',
- self.transformed_event['name'], router_config.pk, 'test_backend'
+ self.transformed_event["name"],
+ router_config.pk,
+ "test_backend",
),
- mocked_logger.info.mock_calls
+ mocked_logger.info.mock_calls,
)
def test_with_non_dict_event(self):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[3]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[3],
)
router = EventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
transformed_event = Statement()
@@ -245,16 +210,16 @@ def test_with_non_dict_event(self):
def test_unsuccessful_routing_of_event(self):
host_configurations = {
- 'url': 'http://test3.com',
- 'version': '1.0.1',
- 'auth_scheme': 'bearer',
- 'auth_key': 'key',
- }
+ "url": "http://test3.com",
+ "version": "1.0.1",
+ "auth_scheme": "bearer",
+ "auth_key": "key",
+ }
client = LrsClient(**host_configurations)
with self.assertRaises(EventNotDispatched):
- client.send(event_name='test', statement_data={})
+ client.send(event_name="test", statement_data={})
- @patch('event_routing_backends.utils.xapi_lrs_client.logger')
+ @patch("event_routing_backends.utils.xapi_lrs_client.logger")
def test_duplicate_xapi_event_id(self, mocked_logger):
"""
Test that when we receive a 409 response when inserting an XAPI statement
@@ -268,13 +233,13 @@ def test_duplicate_xapi_event_id(self, mocked_logger):
client.lrs_client = MagicMock()
client.lrs_client.save_statement.return_value = mock_duplicate_return
- client.send(event_name='test', statement_data={})
+ client.send(event_name="test", statement_data={})
self.assertIn(
- call('Event test received a 409 error indicating the event id already exists.'),
- mocked_logger.info.mock_calls
+ call("Event test received a 409 error indicating the event id already exists."),
+ mocked_logger.info.mock_calls,
)
- @patch('event_routing_backends.utils.xapi_lrs_client.logger')
+ @patch("event_routing_backends.utils.xapi_lrs_client.logger")
def test_duplicate_xapi_event_id_json(self, mocked_logger):
"""
Test that when we receive a 204 response (and the LRSClient fails to parse to JSON
@@ -286,19 +251,16 @@ def test_duplicate_xapi_event_id_json(self, mocked_logger):
client.bulk_send(statement_data=[])
self.assertIn(
- call('JSON Decode Error, this may indicate that all sent events are already stored: []'),
- mocked_logger.warning.mock_calls
+ call("JSON Decode Error, this may indicate that all sent events are already stored: []"),
+ mocked_logger.warning.mock_calls,
)
- @override_settings(
- EVENT_ROUTING_BACKEND_BATCHING_ENABLED=True,
- EVENT_ROUTING_BACKEND_BATCH_SIZE=2
- )
- @patch('event_routing_backends.backends.events_router.get_redis_connection')
- @patch('event_routing_backends.backends.events_router.logger')
- @patch('event_routing_backends.backends.events_router.EventsRouter.bulk_send')
+ @override_settings(EVENT_ROUTING_BACKEND_BATCHING_ENABLED=True, EVENT_ROUTING_BACKEND_BATCH_SIZE=2)
+ @patch("event_routing_backends.backends.events_router.get_redis_connection")
+ @patch("event_routing_backends.backends.events_router.logger")
+ @patch("event_routing_backends.backends.events_router.EventsRouter.bulk_send")
def test_queue_event(self, mock_bulk_send, mock_logger, mock_get_redis_connection):
- router = EventsRouter(processors=[], backend_name='test')
+ router = EventsRouter(processors=[], backend_name="test")
redis_mock = MagicMock()
mock_get_redis_connection.return_value = redis_mock
redis_mock.lpush.return_value = None
@@ -312,7 +274,7 @@ def test_queue_event(self, mock_bulk_send, mock_logger, mock_get_redis_connectio
for event in events:
formatted_event = copy(event)
formatted_event["timestamp"] = event["timestamp"].isoformat()
- formatted_events.append(json.dumps(formatted_event).encode('utf-8'))
+ formatted_events.append(json.dumps(formatted_event).encode("utf-8"))
event2["timestamp"] = event2_emission.isoformat()
@@ -331,22 +293,15 @@ def test_queue_event(self, mock_bulk_send, mock_logger, mock_get_redis_connectio
)
mock_bulk_send.assert_any_call(events)
- @override_settings(
- EVENT_ROUTING_BACKEND_BATCHING_ENABLED=True,
- EVENT_ROUTING_BACKEND_BATCH_SIZE=2
- )
- @patch('event_routing_backends.backends.events_router.get_redis_connection')
- @patch('event_routing_backends.backends.events_router.logger')
- @patch('event_routing_backends.backends.events_router.EventsRouter.bulk_send')
- @patch('event_routing_backends.backends.events_router.EventsRouter.queue_event')
+ @override_settings(EVENT_ROUTING_BACKEND_BATCHING_ENABLED=True, EVENT_ROUTING_BACKEND_BATCH_SIZE=2)
+ @patch("event_routing_backends.backends.events_router.get_redis_connection")
+ @patch("event_routing_backends.backends.events_router.logger")
+ @patch("event_routing_backends.backends.events_router.EventsRouter.bulk_send")
+ @patch("event_routing_backends.backends.events_router.EventsRouter.queue_event")
def test_send_event_with_bulk_exception(
- self,
- mock_queue_event,
- mock_bulk_send,
- mock_logger,
- mock_get_redis_connection
+ self, mock_queue_event, mock_bulk_send, mock_logger, mock_get_redis_connection
):
- router = EventsRouter(processors=[], backend_name='test')
+ router = EventsRouter(processors=[], backend_name="test")
redis_mock = MagicMock()
mock_get_redis_connection.return_value = redis_mock
mock_queue_event.return_value = [1]
@@ -355,21 +310,17 @@ def test_send_event_with_bulk_exception(
router.send(self.transformed_event)
mock_logger.exception.assert_called_once_with(
- 'Exception occurred while trying to bulk dispatch {} events.'.format(
- 1
- ),
- exc_info=True
- )
- mock_logger.info.assert_called_once_with(
- f'Pushing failed events to the dead queue: {router.dead_queue}'
+ "Exception occurred while trying to bulk dispatch {} events.".format(1),
+ exc_info=True,
)
+ mock_logger.info.assert_called_once_with(f"Pushing failed events to the dead queue: {router.dead_queue}")
redis_mock.lpush.assert_called_once_with(router.dead_queue, *[1])
@override_settings(
EVENT_ROUTING_BACKEND_BATCH_INTERVAL=1,
)
def test_time_to_send_no_data(self):
- router = EventsRouter(processors=[], backend_name='test')
+ router = EventsRouter(processors=[], backend_name="test")
redis_mock = MagicMock()
redis_mock.get.return_value = None
self.assertTrue(router.time_to_send(redis_mock))
@@ -380,77 +331,74 @@ class TestAsyncEventsRouter(TestEventsRouter): # pylint: disable=test-inherits-
"""
Test the AsyncEventsRouter
"""
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
def test_with_unsupported_routing_strategy(self, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
- backend_name='test_backend',
+ backend_name="test_backend",
enabled=True,
- route_url='http://test3.com',
+ route_url="http://test3.com",
auth_scheme=RouterConfiguration.AUTH_BEARER,
- auth_key='test_key',
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ auth_key="test_key",
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
- router = AsyncEventsRouter(processors=[], backend_name='test_backend')
+ router = AsyncEventsRouter(processors=[], backend_name="test_backend")
TieredCache.dangerous_clear_all_tiers()
router.send(self.transformed_event)
- mocked_logger.error.assert_called_once_with('Unsupported routing strategy detected: INVALID_TYPE')
+ mocked_logger.error.assert_called_once_with("Unsupported routing strategy detected: INVALID_TYPE")
mocked_post.assert_not_called()
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
def test_bulk_with_unsupported_routing_strategy(self, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
- backend_name='test_backend',
+ backend_name="test_backend",
enabled=True,
- route_url='http://test3.com',
+ route_url="http://test3.com",
auth_scheme=RouterConfiguration.AUTH_BEARER,
- auth_key='test_key',
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ auth_key="test_key",
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
- router = AsyncEventsRouter(processors=[], backend_name='test_backend')
+ router = AsyncEventsRouter(processors=[], backend_name="test_backend")
TieredCache.dangerous_clear_all_tiers()
router.bulk_send([self.transformed_event])
- mocked_logger.error.assert_called_once_with('Unsupported routing strategy detected: INVALID_TYPE')
+ mocked_logger.error.assert_called_once_with("Unsupported routing strategy detected: INVALID_TYPE")
mocked_post.assert_not_called()
- @ddt.data(
- (
- RouterConfiguration.XAPI_BACKEND,
- ),
- (
- RouterConfiguration.CALIPER_BACKEND,
- )
+ @ddt.data((RouterConfiguration.XAPI_BACKEND,), (RouterConfiguration.CALIPER_BACKEND,))
+ @patch.dict(
+ "event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING",
+ {"AUTH_HEADERS": MagicMock(side_effect=EventNotDispatched)},
)
- @patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', {
- 'AUTH_HEADERS': MagicMock(side_effect=EventNotDispatched)
- })
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_generic_exception(self, backend_name, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
backend_name=backend_name,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=backend_name)
router.send(self.transformed_event)
if backend_name == RouterConfiguration.CALIPER_BACKEND:
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
mocked_post.assert_not_called()
else:
mocked_logger.exception.assert_not_called()
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_failed_bulk_post(self, mocked_logger, mocked_post):
mock_response = MagicMock()
@@ -462,20 +410,24 @@ def test_failed_bulk_post(self, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.CALIPER_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.CALIPER_BACKEND)
router.bulk_send([self.transformed_event])
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
- self.assertEqual(mocked_post.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
+ self.assertEqual(
+ mocked_post.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_failed_post(self, mocked_logger, mocked_post):
mock_response = MagicMock()
@@ -487,20 +439,24 @@ def test_failed_post(self, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.CALIPER_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.CALIPER_BACKEND)
router.send(self.transformed_event)
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
- self.assertEqual(mocked_post.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
+ self.assertEqual(
+ mocked_post.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_failed_bulk_routing(self, mocked_logger, mocked_remote_lrs):
mock_response = MagicMock()
@@ -514,20 +470,24 @@ def test_failed_bulk_routing(self, mocked_logger, mocked_remote_lrs):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
router.bulk_send([self.transformed_event])
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
- self.assertEqual(mocked_remote_lrs.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
+ self.assertEqual(
+ mocked_remote_lrs.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_failed_routing(self, mocked_logger, mocked_remote_lrs):
mock_response = MagicMock()
@@ -541,20 +501,24 @@ def test_failed_routing(self, mocked_logger, mocked_remote_lrs):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
router.send(self.transformed_event)
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
- self.assertEqual(mocked_remote_lrs.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
+ self.assertEqual(
+ mocked_remote_lrs.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_duplicate_ids_in_bulk(self, mocked_logger, mocked_remote_lrs):
mock_response = MagicMock()
@@ -568,8 +532,8 @@ def test_duplicate_ids_in_bulk(self, mocked_logger, mocked_remote_lrs):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
@@ -578,77 +542,77 @@ def test_duplicate_ids_in_bulk(self, mocked_logger, mocked_remote_lrs):
self.assertEqual(mocked_logger.exception.call_count, 0)
self.assertEqual(mocked_remote_lrs.call_count, 1)
- @ddt.data(
- (
- RouterConfiguration.XAPI_BACKEND,
- ),
- (
- RouterConfiguration.CALIPER_BACKEND,
- )
+ @ddt.data((RouterConfiguration.XAPI_BACKEND,), (RouterConfiguration.CALIPER_BACKEND,))
+ @patch.dict(
+ "event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING",
+ {"AUTH_HEADERS": MagicMock(side_effect=EventNotDispatched)},
)
- @patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', {
- 'AUTH_HEADERS': MagicMock(side_effect=EventNotDispatched)
- })
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
@ddt.unpack
def test_bulk_generic_exception(self, backend_name, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
backend_name=backend_name,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = AsyncEventsRouter(processors=[], backend_name=backend_name)
router.bulk_send([self.transformed_event])
if backend_name == RouterConfiguration.CALIPER_BACKEND:
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
mocked_post.assert_not_called()
else:
mocked_logger.exception.assert_not_called()
- @patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', {
- 'AUTH_HEADERS': MagicMock(side_effect=EventNotDispatched)
- })
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.tasks.logger')
+ @patch.dict(
+ "event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING",
+ {"AUTH_HEADERS": MagicMock(side_effect=EventNotDispatched)},
+ )
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.tasks.logger")
def test_generic_exception_business_critical_event(self, mocked_logger, mocked_post):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
+ route_url="http://test3.com",
auth_scheme=RouterConfiguration.AUTH_BEARER,
- auth_key='test_key',
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ auth_key="test_key",
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
router = AsyncEventsRouter(processors=[], backend_name=RouterConfiguration.CALIPER_BACKEND)
event_data = self.transformed_event.copy()
business_critical_events = get_business_critical_events()
- event_data['name'] = business_critical_events[0]
+ event_data["name"] = business_critical_events[0]
router.send(event_data)
- self.assertEqual(mocked_logger.exception.call_count,
- getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 3) + 1)
+ self.assertEqual(
+ mocked_logger.exception.call_count,
+ getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 3) + 1,
+ )
mocked_post.assert_not_called()
@ddt.data(
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.CALIPER_BACKEND,
- 'http://test1.com'
- ),
+ (
+ RouterConfiguration.AUTH_BASIC,
+ None,
+ "abc",
+ "xyz",
+ RouterConfiguration.CALIPER_BACKEND,
+ "http://test1.com",
+ ),
(
RouterConfiguration.AUTH_BEARER,
- 'test_key',
+ "test_key",
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test2.com'
+ "http://test2.com",
),
(
None,
@@ -656,34 +620,28 @@ def test_generic_exception_business_critical_event(self, mocked_logger, mocked_p
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test3.com'
+ "http://test3.com",
),
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.XAPI_BACKEND,
- 'http://test1.com'
- ),
(
- RouterConfiguration.AUTH_BEARER,
- 'test_key',
- None,
+ RouterConfiguration.AUTH_BASIC,
None,
+ "abc",
+ "xyz",
RouterConfiguration.XAPI_BACKEND,
- 'http://test2.com'
+ "http://test1.com",
),
(
- None,
- None,
+ RouterConfiguration.AUTH_BEARER,
+ "test_key",
None,
None,
RouterConfiguration.XAPI_BACKEND,
- 'http://test3.com'
+ "http://test2.com",
),
+ (None, None, None, None, RouterConfiguration.XAPI_BACKEND, "http://test3.com"),
)
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_successful_routing_of_event(
self,
@@ -701,10 +659,10 @@ def test_successful_routing_of_event(
mocked_api_key_client = MagicMock()
MOCKED_MAP = {
- 'AUTH_HEADERS': HttpClient,
- 'OAUTH2': mocked_oauth_client,
- 'API_KEY': mocked_api_key_client,
- 'XAPI_LRS': LrsClient,
+ "AUTH_HEADERS": HttpClient,
+ "OAUTH2": mocked_oauth_client,
+ "API_KEY": mocked_api_key_client,
+ "XAPI_LRS": LrsClient,
}
RouterConfigurationFactory.create(
backend_name=backend_name,
@@ -714,58 +672,62 @@ def test_successful_routing_of_event(
auth_key=auth_key,
username=username,
password=password,
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
router = AsyncEventsRouter(processors=[], backend_name=backend_name)
- with patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', MOCKED_MAP):
+ with patch.dict("event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING", MOCKED_MAP):
router.send(self.transformed_event)
overridden_event = self.transformed_event.copy()
- overridden_event['new_key'] = 'new_value'
+ overridden_event["new_key"] = "new_value"
if backend_name == RouterConfiguration.XAPI_BACKEND:
# test LRS Client
- mocked_lrs().save_statement.assert_has_calls([
- call(overridden_event),
- ])
+ mocked_lrs().save_statement.assert_has_calls(
+ [
+ call(overridden_event),
+ ]
+ )
else:
# test the HTTP client
if auth_scheme == RouterConfiguration.AUTH_BASIC:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- },
- auth=(username, password)
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={},
+ auth=(username, password),
+ ),
+ ]
+ )
elif auth_scheme == RouterConfiguration.AUTH_BEARER:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- 'Authorization': RouterConfiguration.AUTH_BEARER + ' ' + auth_key
- }
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={"Authorization": RouterConfiguration.AUTH_BEARER + " " + auth_key},
+ ),
+ ]
+ )
else:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- },
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={},
+ ),
+ ]
+ )
# test mocked oauth client
mocked_oauth_client.assert_not_called()
- @patch('event_routing_backends.utils.http_client.requests.post')
+ @patch("event_routing_backends.utils.http_client.requests.post")
def test_unsuccessful_routing_of_event_http(self, mocked_post):
mock_response = MagicMock()
mock_response.status_code = 500
@@ -774,29 +736,30 @@ def test_unsuccessful_routing_of_event_http(self, mocked_post):
mocked_post.return_value = mock_response
host_configurations = {
- 'url': 'http://test4.com',
- 'auth_scheme': 'bearer',
- 'auth_key': 'key',
- }
+ "url": "http://test4.com",
+ "auth_scheme": "bearer",
+ "auth_key": "key",
+ }
client = HttpClient(**host_configurations)
with self.assertRaises(EventNotDispatched):
- client.send(event=self.transformed_event, event_name=self.transformed_event['name'])
+ client.send(event=self.transformed_event, event_name=self.transformed_event["name"])
@ddt.data(
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.CALIPER_BACKEND,
- 'http://test1.com'
- ),
+ (
+ RouterConfiguration.AUTH_BASIC,
+ None,
+ "abc",
+ "xyz",
+ RouterConfiguration.CALIPER_BACKEND,
+ "http://test1.com",
+ ),
(
RouterConfiguration.AUTH_BEARER,
- 'test_key',
+ "test_key",
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test2.com'
+ "http://test2.com",
),
(
None,
@@ -804,34 +767,28 @@ def test_unsuccessful_routing_of_event_http(self, mocked_post):
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test3.com'
+ "http://test3.com",
),
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.XAPI_BACKEND,
- 'http://test1.com'
- ),
(
- RouterConfiguration.AUTH_BEARER,
- 'test_key',
- None,
+ RouterConfiguration.AUTH_BASIC,
None,
+ "abc",
+ "xyz",
RouterConfiguration.XAPI_BACKEND,
- 'http://test2.com'
+ "http://test1.com",
),
(
- None,
- None,
+ RouterConfiguration.AUTH_BEARER,
+ "test_key",
None,
None,
RouterConfiguration.XAPI_BACKEND,
- 'http://test3.com'
+ "http://test2.com",
),
+ (None, None, None, None, RouterConfiguration.XAPI_BACKEND, "http://test3.com"),
)
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_successful_routing_of_bulk_events(
self,
@@ -849,10 +806,10 @@ def test_successful_routing_of_bulk_events(
mocked_api_key_client = MagicMock()
MOCKED_MAP = {
- 'AUTH_HEADERS': HttpClient,
- 'OAUTH2': mocked_oauth_client,
- 'API_KEY': mocked_api_key_client,
- 'XAPI_LRS': LrsClient,
+ "AUTH_HEADERS": HttpClient,
+ "OAUTH2": mocked_oauth_client,
+ "API_KEY": mocked_api_key_client,
+ "XAPI_LRS": LrsClient,
}
RouterConfigurationFactory.create(
backend_name=backend_name,
@@ -862,12 +819,12 @@ def test_successful_routing_of_bulk_events(
auth_key=auth_key,
username=username,
password=password,
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
router = AsyncEventsRouter(processors=[], backend_name=backend_name)
- with patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', MOCKED_MAP):
+ with patch.dict("event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING", MOCKED_MAP):
router.bulk_send(self.bulk_transformed_events)
overridden_events = self.bulk_transformed_events.copy()
@@ -875,44 +832,48 @@ def test_successful_routing_of_bulk_events(
overridden_events.pop()
for event in overridden_events:
- event['new_key'] = 'new_value'
+ event["new_key"] = "new_value"
if backend_name == RouterConfiguration.XAPI_BACKEND:
# test LRS Client
- mocked_lrs().save_statements.assert_has_calls([
- call(overridden_events),
- ])
+ mocked_lrs().save_statements.assert_has_calls(
+ [
+ call(overridden_events),
+ ]
+ )
else:
# test the HTTP client
if auth_scheme == RouterConfiguration.AUTH_BASIC:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- },
- auth=(username, password)
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={},
+ auth=(username, password),
+ ),
+ ]
+ )
elif auth_scheme == RouterConfiguration.AUTH_BEARER:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- 'Authorization': RouterConfiguration.AUTH_BEARER + ' ' + auth_key
- }
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={"Authorization": RouterConfiguration.AUTH_BEARER + " " + auth_key},
+ ),
+ ]
+ )
else:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- },
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={},
+ ),
+ ]
+ )
# test mocked oauth client
mocked_oauth_client.assert_not_called()
@@ -923,43 +884,46 @@ class TestSyncEventsRouter(TestEventsRouter): # pylint: disable=test-inherits-t
"""
Test the SyncEventsRouter
"""
- @patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', {
- 'AUTH_HEADERS': MagicMock(side_effect=EventNotDispatched)
- })
- @patch('event_routing_backends.utils.http_client.requests.post')
+
+ @patch.dict(
+ "event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING",
+ {"AUTH_HEADERS": MagicMock(side_effect=EventNotDispatched)},
+ )
+ @patch("event_routing_backends.utils.http_client.requests.post")
def test_generic_exception_business_critical_event(self, mocked_post):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
+ route_url="http://test3.com",
auth_scheme=RouterConfiguration.AUTH_BEARER,
- auth_key='test_key',
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ auth_key="test_key",
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
router = SyncEventsRouter(processors=[], backend_name=RouterConfiguration.CALIPER_BACKEND)
event_data = self.transformed_event.copy()
business_critical_events = get_business_critical_events()
- event_data['name'] = business_critical_events[0]
+ event_data["name"] = business_critical_events[0]
router.send(event_data)
mocked_post.assert_not_called()
@ddt.data(
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.CALIPER_BACKEND,
- 'http://test1.com'
- ),
+ (
+ RouterConfiguration.AUTH_BASIC,
+ None,
+ "abc",
+ "xyz",
+ RouterConfiguration.CALIPER_BACKEND,
+ "http://test1.com",
+ ),
(
RouterConfiguration.AUTH_BEARER,
- 'test_key',
+ "test_key",
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test2.com'
+ "http://test2.com",
),
(
None,
@@ -967,34 +931,28 @@ def test_generic_exception_business_critical_event(self, mocked_post):
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test3.com'
+ "http://test3.com",
),
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.XAPI_BACKEND,
- 'http://test1.com'
- ),
(
- RouterConfiguration.AUTH_BEARER,
- 'test_key',
- None,
+ RouterConfiguration.AUTH_BASIC,
None,
+ "abc",
+ "xyz",
RouterConfiguration.XAPI_BACKEND,
- 'http://test2.com'
+ "http://test1.com",
),
(
- None,
- None,
+ RouterConfiguration.AUTH_BEARER,
+ "test_key",
None,
None,
RouterConfiguration.XAPI_BACKEND,
- 'http://test3.com'
+ "http://test2.com",
),
+ (None, None, None, None, RouterConfiguration.XAPI_BACKEND, "http://test3.com"),
)
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_successful_routing_of_event(
self,
@@ -1012,10 +970,10 @@ def test_successful_routing_of_event(
mocked_api_key_client = MagicMock()
MOCKED_MAP = {
- 'AUTH_HEADERS': HttpClient,
- 'OAUTH2': mocked_oauth_client,
- 'API_KEY': mocked_api_key_client,
- 'XAPI_LRS': LrsClient,
+ "AUTH_HEADERS": HttpClient,
+ "OAUTH2": mocked_oauth_client,
+ "API_KEY": mocked_api_key_client,
+ "XAPI_LRS": LrsClient,
}
RouterConfigurationFactory.create(
backend_name=backend_name,
@@ -1025,7 +983,7 @@ def test_successful_routing_of_event(
auth_key=auth_key,
username=username,
password=password,
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
mock_response = MagicMock()
@@ -1037,67 +995,72 @@ def test_successful_routing_of_event(
self.transformed_event["name"] = get_business_critical_events()[0]
- with patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', MOCKED_MAP):
+ with patch.dict("event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING", MOCKED_MAP):
router.send(self.transformed_event)
overridden_event = self.transformed_event.copy()
- overridden_event['new_key'] = 'new_value'
+ overridden_event["new_key"] = "new_value"
if backend_name == RouterConfiguration.XAPI_BACKEND:
# test LRS Client
- mocked_lrs().save_statement.assert_has_calls([
- call(overridden_event),
- ])
+ mocked_lrs().save_statement.assert_has_calls(
+ [
+ call(overridden_event),
+ ]
+ )
else:
# test the HTTP client
if auth_scheme == RouterConfiguration.AUTH_BASIC:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- },
- auth=(username, password)
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={},
+ auth=(username, password),
+ ),
+ ]
+ )
elif auth_scheme == RouterConfiguration.AUTH_BEARER:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- 'Authorization': RouterConfiguration.AUTH_BEARER + ' ' + auth_key
- }
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={"Authorization": RouterConfiguration.AUTH_BEARER + " " + auth_key},
+ ),
+ ]
+ )
else:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_event,
- headers={
- },
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_event,
+ headers={},
+ ),
+ ]
+ )
# test mocked oauth client
mocked_oauth_client.assert_not_called()
@ddt.data(
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.CALIPER_BACKEND,
- 'http://test1.com'
- ),
+ (
+ RouterConfiguration.AUTH_BASIC,
+ None,
+ "abc",
+ "xyz",
+ RouterConfiguration.CALIPER_BACKEND,
+ "http://test1.com",
+ ),
(
RouterConfiguration.AUTH_BEARER,
- 'test_key',
+ "test_key",
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test2.com'
+ "http://test2.com",
),
(
None,
@@ -1105,34 +1068,28 @@ def test_successful_routing_of_event(
None,
None,
RouterConfiguration.CALIPER_BACKEND,
- 'http://test3.com'
+ "http://test3.com",
),
- (RouterConfiguration.AUTH_BASIC,
- None,
- 'abc',
- 'xyz',
- RouterConfiguration.XAPI_BACKEND,
- 'http://test1.com'
- ),
(
- RouterConfiguration.AUTH_BEARER,
- 'test_key',
- None,
+ RouterConfiguration.AUTH_BASIC,
None,
+ "abc",
+ "xyz",
RouterConfiguration.XAPI_BACKEND,
- 'http://test2.com'
+ "http://test1.com",
),
(
- None,
- None,
+ RouterConfiguration.AUTH_BEARER,
+ "test_key",
None,
None,
RouterConfiguration.XAPI_BACKEND,
- 'http://test3.com'
+ "http://test2.com",
),
+ (None, None, None, None, RouterConfiguration.XAPI_BACKEND, "http://test3.com"),
)
- @patch('event_routing_backends.utils.http_client.requests.post')
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.http_client.requests.post")
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_successful_routing_of_bulk_events(
self,
@@ -1150,10 +1107,10 @@ def test_successful_routing_of_bulk_events(
mocked_api_key_client = MagicMock()
MOCKED_MAP = {
- 'AUTH_HEADERS': HttpClient,
- 'OAUTH2': mocked_oauth_client,
- 'API_KEY': mocked_api_key_client,
- 'XAPI_LRS': LrsClient,
+ "AUTH_HEADERS": HttpClient,
+ "OAUTH2": mocked_oauth_client,
+ "API_KEY": mocked_api_key_client,
+ "XAPI_LRS": LrsClient,
}
RouterConfigurationFactory.create(
backend_name=backend_name,
@@ -1163,7 +1120,7 @@ def test_successful_routing_of_bulk_events(
auth_key=auth_key,
username=username,
password=password,
- configurations=ROUTER_CONFIG_FIXTURE[0]
+ configurations=ROUTER_CONFIG_FIXTURE[0],
)
mock_response = MagicMock()
@@ -1173,7 +1130,7 @@ def test_successful_routing_of_bulk_events(
router = SyncEventsRouter(processors=[], backend_name=backend_name)
- with patch.dict('event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING', MOCKED_MAP):
+ with patch.dict("event_routing_backends.tasks.ROUTER_STRATEGY_MAPPING", MOCKED_MAP):
router.bulk_send(self.bulk_transformed_events)
overridden_events = self.bulk_transformed_events.copy()
@@ -1181,49 +1138,53 @@ def test_successful_routing_of_bulk_events(
overridden_events.pop()
for event in overridden_events:
- event['new_key'] = 'new_value'
+ event["new_key"] = "new_value"
if backend_name == RouterConfiguration.XAPI_BACKEND:
# test LRS Client
- mocked_lrs().save_statements.assert_has_calls([
- call(overridden_events),
- ])
+ mocked_lrs().save_statements.assert_has_calls(
+ [
+ call(overridden_events),
+ ]
+ )
else:
# test the HTTP client
if auth_scheme == RouterConfiguration.AUTH_BASIC:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- },
- auth=(username, password)
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={},
+ auth=(username, password),
+ ),
+ ]
+ )
elif auth_scheme == RouterConfiguration.AUTH_BEARER:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- 'Authorization': RouterConfiguration.AUTH_BEARER + ' ' + auth_key
- }
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={"Authorization": RouterConfiguration.AUTH_BEARER + " " + auth_key},
+ ),
+ ]
+ )
else:
- mocked_post.assert_has_calls([
- call(
- url=route_url,
- json=overridden_events,
- headers={
- },
- ),
- ])
+ mocked_post.assert_has_calls(
+ [
+ call(
+ url=route_url,
+ json=overridden_events,
+ headers={},
+ ),
+ ]
+ )
# test mocked oauth client
mocked_oauth_client.assert_not_called()
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_failed_bulk_routing(self, mocked_remote_lrs):
mock_response = MagicMock()
@@ -1237,15 +1198,15 @@ def test_failed_bulk_routing(self, mocked_remote_lrs):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = SyncEventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
with self.assertRaises(EventNotDispatched):
router.bulk_send([self.transformed_event])
- @patch('event_routing_backends.utils.xapi_lrs_client.RemoteLRS')
+ @patch("event_routing_backends.utils.xapi_lrs_client.RemoteLRS")
@ddt.unpack
def test_failed_routing(self, mocked_remote_lrs):
mock_response = MagicMock()
@@ -1260,32 +1221,32 @@ def test_failed_routing(self, mocked_remote_lrs):
RouterConfigurationFactory.create(
backend_name=RouterConfiguration.XAPI_BACKEND,
enabled=True,
- route_url='http://test3.com',
- configurations=ROUTER_CONFIG_FIXTURE[2]
+ route_url="http://test3.com",
+ configurations=ROUTER_CONFIG_FIXTURE[2],
)
router = SyncEventsRouter(processors=[], backend_name=RouterConfiguration.XAPI_BACKEND)
with self.assertRaises(EventNotDispatched):
router.send(self.transformed_event)
- @patch('event_routing_backends.backends.events_router.get_redis_connection')
+ @patch("event_routing_backends.backends.events_router.get_redis_connection")
def test_get_failed_events(self, mock_get_redis_connection):
redis_mock = MagicMock()
mock_get_redis_connection.return_value = redis_mock
- redis_mock.rpop.return_value = [json.dumps({'name': 'test', 'data': {'key': 'value'}}).encode('utf-8')]
+ redis_mock.rpop.return_value = [json.dumps({"name": "test", "data": {"key": "value"}}).encode("utf-8")]
- router = SyncEventsRouter(processors=[], backend_name='test')
+ router = SyncEventsRouter(processors=[], backend_name="test")
router.get_failed_events(1)
redis_mock.rpop.assert_called_once_with(router.dead_queue, 1)
- @patch('event_routing_backends.backends.events_router.get_redis_connection')
+ @patch("event_routing_backends.backends.events_router.get_redis_connection")
def test_get_failed_events_empty(self, mock_get_redis_connection):
redis_mock = MagicMock()
mock_get_redis_connection.return_value = redis_mock
redis_mock.rpop.return_value = None
- router = SyncEventsRouter(processors=[], backend_name='test')
+ router = SyncEventsRouter(processors=[], backend_name="test")
events = router.get_failed_events(1)
self.assertEqual(events, [])
diff --git a/event_routing_backends/helpers.py b/event_routing_backends/helpers.py
index 2fe357cc..0c73eac1 100644
--- a/event_routing_backends/helpers.py
+++ b/event_routing_backends/helpers.py
@@ -1,6 +1,7 @@
"""
Helper utilities for event transformers.
"""
+
import datetime
import logging
import uuid
@@ -30,8 +31,8 @@
User = get_user_model()
-UTC_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
-BLOCK_ID_FORMAT = '{block_version}:{course_id}+type@{block_type}+block@{block_id}'
+UTC_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
+BLOCK_ID_FORMAT = "{block_version}:{course_id}+type@{block_type}+block@{block_id}"
def get_uuid5(namespace_key, name):
@@ -48,7 +49,7 @@ def get_uuid5(namespace_key, name):
"""
# We are not pulling base uuid from settings to avoid
# data discrepancies incase setting is changed inadvertently
- base_uuid = uuid.UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
+ base_uuid = uuid.UUID("6ba7b810-9dad-11d1-80b4-00c04fd430c8")
base_namespace = uuid.uuid5(base_uuid, namespace_key)
return uuid.uuid5(base_namespace, name)
@@ -73,8 +74,10 @@ def get_anonymous_user_id(username_or_id, external_type):
user = get_user(username_or_id)
if not user:
- logger.warning('User with username "%s" does not exist. '
- 'Cannot generate anonymous ID', username_or_id)
+ logger.warning(
+ 'User with username "%s" does not exist. Cannot generate anonymous ID',
+ username_or_id,
+ )
raise ValueError(f"User with username {username_or_id} does not exist.")
@@ -145,7 +148,7 @@ def get_user_email(username_or_id):
if not user:
logger.info('User with username "%s" does not exist.', username_or_id)
- user_email = 'unknown@example.com'
+ user_email = "unknown@example.com"
else:
user_email = user.email
@@ -185,9 +188,7 @@ def convert_seconds_to_iso(seconds):
"""
if seconds is None:
return None
- return duration_isoformat(datetime.timedelta(
- seconds=seconds
- ))
+ return duration_isoformat(datetime.timedelta(seconds=seconds))
def convert_seconds_to_float(seconds):
@@ -223,7 +224,7 @@ def convert_datetime_to_iso(current_datetime):
utc_offset = current_datetime.utcoffset()
utc_datetime = current_datetime - utc_offset
- formatted_datetime = utc_datetime.strftime(UTC_DATETIME_FORMAT)[:-3] + 'Z'
+ formatted_datetime = utc_datetime.strftime(UTC_DATETIME_FORMAT)[:-3] + "Z"
return formatted_datetime
@@ -240,9 +241,11 @@ def get_block_id_from_event_referrer(referrer):
"""
if referrer is not None:
parsed = urlparse(referrer)
- block_id = parse_qs(parsed.query)['activate_block_id'][0]\
- if 'activate_block_id' in parse_qs(parsed.query) and parse_qs(parsed.query)['activate_block_id'][0] \
+ block_id = (
+ parse_qs(parsed.query)["activate_block_id"][0]
+ if "activate_block_id" in parse_qs(parsed.query) and parse_qs(parsed.query)["activate_block_id"][0]
else None
+ )
else:
block_id = None
@@ -262,15 +265,15 @@ def get_block_id_from_event_data(data, course_id):
str or None
"""
if data is not None and course_id is not None:
- data_array = data.split('_')
- course_id_array = course_id.split(':')
+ data_array = data.split("_")
+ course_id_array = course_id.split(":")
block_version = get_block_version(course_id)
if len(data_array) > 1 and len(course_id_array) > 1:
block_id = BLOCK_ID_FORMAT.format(
block_version=block_version,
course_id=course_id_array[1],
- block_type='problem',
- block_id=data_array[1]
+ block_type="problem",
+ block_id=data_array[1],
)
else:
block_id = None # pragma: no cover
@@ -294,10 +297,7 @@ def get_problem_block_id(referrer, data, course_id):
"""
block_id = get_block_id_from_event_referrer(referrer)
if block_id is None:
- block_id = get_block_id_from_event_data(
- data,
- course_id
- )
+ block_id = get_block_id_from_event_data(data, course_id)
return block_id
@@ -313,13 +313,13 @@ def make_video_block_id(video_id, course_id):
Returns:
str
"""
- course_id_array = course_id.split(':')
+ course_id_array = course_id.split(":")
block_version = get_block_version(course_id)
return BLOCK_ID_FORMAT.format(
block_version=block_version,
course_id=course_id_array[1],
- block_type='video',
- block_id=video_id
+ block_type="video",
+ block_id=video_id,
)
@@ -330,7 +330,7 @@ def backend_cache_ttl():
Returns:
int
"""
- return getattr(settings, 'EVENT_TRACKING_BACKENDS_CACHE_TTL', 600)
+ return getattr(settings, "EVENT_TRACKING_BACKENDS_CACHE_TTL", 600)
def get_business_critical_events():
@@ -340,11 +340,15 @@ def get_business_critical_events():
Returns:
list
"""
- return getattr(settings, 'EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS', [
- 'edx.course.enrollment.activated',
- 'edx.course.enrollment.deactivated',
- 'edx.course.grade.passed.first_time'
- ])
+ return getattr(
+ settings,
+ "EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS",
+ [
+ "edx.course.enrollment.activated",
+ "edx.course.enrollment.deactivated",
+ "edx.course.grade.passed.first_time",
+ ],
+ )
def get_block_version(course_id):
@@ -358,7 +362,7 @@ def get_block_version(course_id):
Returns:
str
"""
- course_id_array = course_id.split(':')
+ course_id_array = course_id.split(":")
block_version = "block-{0}".format(course_id_array[0].split("-")[-1])
if "ccx" in course_id_array[0]:
block_version = "ccx-{block_version}".format(block_version=block_version)
diff --git a/event_routing_backends/management/commands/helpers/event_log_parser.py b/event_routing_backends/management/commands/helpers/event_log_parser.py
index eecf9b91..304a4ac6 100644
--- a/event_routing_backends/management/commands/helpers/event_log_parser.py
+++ b/event_routing_backends/management/commands/helpers/event_log_parser.py
@@ -3,6 +3,7 @@
Taken entirely from edx-analytics-pipeline.
"""
+
import json
import logging
import re
@@ -10,7 +11,7 @@
log = logging.getLogger(__name__)
-PATTERN_JSON = re.compile(r'^.*?(\{.*\})\s*$')
+PATTERN_JSON = re.compile(r"^.*?(\{.*\})\s*$")
def parse_json_event(line):
diff --git a/event_routing_backends/management/commands/helpers/queued_sender.py b/event_routing_backends/management/commands/helpers/queued_sender.py
index 8a7ecf5e..5dc499fb 100644
--- a/event_routing_backends/management/commands/helpers/queued_sender.py
+++ b/event_routing_backends/management/commands/helpers/queued_sender.py
@@ -1,6 +1,7 @@
"""
Class to handle batching and sending bulk transformed statements.
"""
+
import datetime
import json
import os
@@ -16,6 +17,7 @@ class QueuedSender:
"""
Handles queuing and sending events to the destination.
"""
+
def __init__( # pylint: disable=too-many-positional-arguments
self,
destination,
@@ -24,7 +26,7 @@ def __init__( # pylint: disable=too-many-positional-arguments
transformer_type,
max_queue_size=10000,
sleep_between_batches_secs=1.0,
- dry_run=False
+ dry_run=False,
):
self.destination = destination
self.destination_container = destination_container
@@ -51,9 +53,9 @@ def is_known_event(self, event):
"""
if "name" in event:
for processor in self.engine.processors:
- if hasattr(processor, 'whitelist') and event["name"] in processor.whitelist:
+ if hasattr(processor, "whitelist") and event["name"] in processor.whitelist:
return True
- elif hasattr(processor, 'registry') and event["name"] in processor.registry.mapping:
+ elif hasattr(processor, "registry") and event["name"] in processor.registry.mapping:
return True
return False
@@ -120,7 +122,7 @@ def store(self):
container = self.destination.get_container(self.destination_container)
- datestr = datetime.datetime.now().strftime('%y-%m-%d_%H-%M-%S')
+ datestr = datetime.datetime.now().strftime("%y-%m-%d_%H-%M-%S")
object_name = f"{self.destination_prefix}/{datestr}_{self.transformer_type}.log"
print(f"Writing to {self.destination_container}/{object_name}")
@@ -131,11 +133,7 @@ def store(self):
out.write(str.encode("\n"))
out.seek(0)
- self.destination.upload_object_via_stream(
- out,
- container,
- object_name
- )
+ self.destination.upload_object_via_stream(out, container, object_name)
def finalize(self):
"""
@@ -156,7 +154,9 @@ def finalize(self):
self.store()
self.batches_sent += 1
- print(f"Queued {self.queued_lines} log lines, "
- f"could not parse {self.unparsable_lines} log lines, "
- f"skipped {self.skipped_lines} log lines, "
- f"sent {self.batches_sent} batches.")
+ print(
+ f"Queued {self.queued_lines} log lines, "
+ f"could not parse {self.unparsable_lines} log lines, "
+ f"skipped {self.skipped_lines} log lines, "
+ f"sent {self.batches_sent} batches."
+ )
diff --git a/event_routing_backends/management/commands/recover_failed_events.py b/event_routing_backends/management/commands/recover_failed_events.py
index ccb65975..dad95faf 100644
--- a/event_routing_backends/management/commands/recover_failed_events.py
+++ b/event_routing_backends/management/commands/recover_failed_events.py
@@ -57,11 +57,7 @@ def handle(self, *args, **options):
failed = 0
while failed_events := backend.get_failed_events(batch_size):
- logger.info(
- "Recovering {} failed events for backend {}".format(
- len(failed_events), transformer_type
- )
- )
+ logger.info("Recovering {} failed events for backend {}".format(len(failed_events), transformer_type))
for event in failed_events:
try:
backend.send(event)
diff --git a/event_routing_backends/management/commands/tests/test_recover_failed_events.py b/event_routing_backends/management/commands/tests/test_recover_failed_events.py
index fa15bf2f..5bef2351 100644
--- a/event_routing_backends/management/commands/tests/test_recover_failed_events.py
+++ b/event_routing_backends/management/commands/tests/test_recover_failed_events.py
@@ -51,9 +51,7 @@ class TestRecoverFailedEvents(TestCase):
},
}
)
- @patch(
- "event_routing_backends.management.commands.recover_failed_events.get_tracker"
- )
+ @patch("event_routing_backends.management.commands.recover_failed_events.get_tracker")
def test_send_tracking_log_to_backends(self, mock_get_tracker):
"""
Test for send_tracking_log_to_backends
@@ -78,13 +76,9 @@ def test_send_tracking_log_to_backends(self, mock_get_tracker):
},
}
)
- @patch(
- "event_routing_backends.management.commands.recover_failed_events.get_tracker"
- )
+ @patch("event_routing_backends.management.commands.recover_failed_events.get_tracker")
@patch("event_routing_backends.management.commands.recover_failed_events.logger")
- def test_send_tracking_log_to_backends_with_exception(
- self, mock_logger, mock_get_tracker
- ):
+ def test_send_tracking_log_to_backends_with_exception(self, mock_logger, mock_get_tracker):
"""
Test for send_tracking_log_to_backends
"""
@@ -109,13 +103,9 @@ def test_send_tracking_log_to_backends_with_exception(
},
}
)
- @patch(
- "event_routing_backends.management.commands.recover_failed_events.get_tracker"
- )
+ @patch("event_routing_backends.management.commands.recover_failed_events.get_tracker")
@patch("event_routing_backends.management.commands.recover_failed_events.logger")
- def test_send_tracking_log_to_backends_with_event_exception(
- self, mock_logger, mock_get_tracker
- ):
+ def test_send_tracking_log_to_backends_with_event_exception(self, mock_logger, mock_get_tracker):
"""
Test for send_tracking_log_to_backends
"""
@@ -146,9 +136,7 @@ def test_send_tracking_log_to_backends_with_event_exception(
},
}
)
- @patch(
- "event_routing_backends.management.commands.recover_failed_events.get_tracker"
- )
+ @patch("event_routing_backends.management.commands.recover_failed_events.get_tracker")
def test_send_tracking_log_to_backends_no_failed_events(self, mock_get_tracker):
"""
Test for send_tracking_log_to_backends
diff --git a/event_routing_backends/management/commands/tests/test_transform_tracking_logs.py b/event_routing_backends/management/commands/tests/test_transform_tracking_logs.py
index 60cc412d..bb85781c 100644
--- a/event_routing_backends/management/commands/tests/test_transform_tracking_logs.py
+++ b/event_routing_backends/management/commands/tests/test_transform_tracking_logs.py
@@ -1,6 +1,7 @@
"""
Tests for the transform_tracking_logs management command.
"""
+
import json
import os
from unittest.mock import MagicMock, patch
@@ -31,15 +32,17 @@
LOCAL_CONFIG = json.dumps({"key": "/openedx/", "container": "data", "prefix": ""})
-REMOTE_CONFIG = json.dumps({
- "key": "api key",
- "secret": "api secret key",
- "prefix": "/xapi_statements/",
- "container": "test_bucket",
- "secure": False,
- "host": "127.0.0.1",
- "port": 9191
-})
+REMOTE_CONFIG = json.dumps(
+ {
+ "key": "api key",
+ "secret": "api secret key",
+ "prefix": "/xapi_statements/",
+ "container": "test_bucket",
+ "secure": False,
+ "host": "127.0.0.1",
+ "port": 9191,
+ }
+)
@pytest.fixture
@@ -48,8 +51,8 @@ def mock_common_calls():
Mock out calls that we test elsewhere and aren't relevant to the command tests.
"""
command_path = "event_routing_backends.management.commands.transform_tracking_logs"
- with patch(command_path+".Provider") as mock_libcloud_provider:
- with patch(command_path+".get_driver") as mock_libcloud_get_driver:
+ with patch(command_path + ".Provider") as mock_libcloud_provider:
+ with patch(command_path + ".get_driver") as mock_libcloud_get_driver:
yield mock_libcloud_provider, mock_libcloud_get_driver
@@ -73,8 +76,8 @@ def command_options():
"Max queue size of 1 reached, sending.",
"Sending 1 events to LRS...",
"Queued 2 log lines, could not parse 2 log lines, skipped 8 log lines, sent 3 batches.",
- "Sending to LRS!"
- ]
+ "Sending to LRS!",
+ ],
},
"registry_mapping": {"problem_check": 1},
},
@@ -93,7 +96,7 @@ def command_options():
"Finalizing 2 events to LRS",
"Dry run, skipping final storage.",
"Queued 2 log lines, could not parse 2 log lines, skipped 8 log lines, sent 0 batches.",
- ]
+ ],
},
"registry_mapping": {"problem_check": 1},
},
@@ -112,9 +115,9 @@ def command_options():
"Sending to LRS!",
"Sending 2 events to LRS...",
"Queued 2 log lines, could not parse 2 log lines, skipped 8 log lines, sent 1 batches.",
- ]
+ ],
},
- "whitelist": ["problem_check"]
+ "whitelist": ["problem_check"],
},
# Local file to remote file
{
@@ -136,9 +139,9 @@ def command_options():
"Storing 2 events to libcloud destination test_bucket/xapi_statements/",
"Storing 0 events to libcloud destination test_bucket/xapi_statements/",
"Queued 2 log lines, could not parse 2 log lines, skipped 8 log lines, sent 2 batches.",
- ]
+ ],
},
- "whitelist": ["problem_check"]
+ "whitelist": ["problem_check"],
},
# Remote file dry run
{
@@ -159,9 +162,9 @@ def command_options():
"Dry run, skipping, but still clearing the queue.",
"Dry run, skipping final storage.",
"Queued 2 log lines, could not parse 2 log lines, skipped 8 log lines, sent 0 batches.",
- ]
+ ],
},
- "whitelist": ["problem_check"]
+ "whitelist": ["problem_check"],
},
]
@@ -171,7 +174,7 @@ def command_options():
def _get_tracking_log_file_path():
TEST_DIR_PATH = os.path.dirname(os.path.abspath(__file__))
- return '{test_dir}/fixtures/tracking.log'.format(test_dir=TEST_DIR_PATH)
+ return "{test_dir}/fixtures/tracking.log".format(test_dir=TEST_DIR_PATH)
def _get_raw_log_size():
@@ -196,7 +199,7 @@ def test_transform_command(command_opts, mock_common_calls, caplog, capsys):
mock_libcloud_provider, mock_libcloud_get_driver = mock_common_calls
expected_results = command_opts.pop("expected_results")
- transform_tracking_logs.CHUNK_SIZE = command_opts.pop("chunk_size", 1024*1024*2)
+ transform_tracking_logs.CHUNK_SIZE = command_opts.pop("chunk_size", 1024 * 1024 * 2)
mm = MagicMock()
@@ -222,10 +225,7 @@ def test_transform_command(command_opts, mock_common_calls, caplog, capsys):
for backend in tracker.backends["event_transformer"].backends.values():
backend.bulk_send = MagicMock()
- call_command(
- 'transform_tracking_logs',
- **command_opts
- )
+ call_command("transform_tracking_logs", **command_opts)
captured = capsys.readouterr()
print(captured.out)
@@ -294,7 +294,8 @@ def test_queued_sender_container_does_not_exist(mock_common_calls, caplog):
"""
mock_destination = MagicMock()
mock_destination.get_container.side_effect = ContainerDoesNotExistError(
- "Container 'fake_container' doesn't exist.", None, "fake")
+ "Container 'fake_container' doesn't exist.", None, "fake"
+ )
with pytest.raises(ContainerDoesNotExistError):
qs = QueuedSender(mock_destination, "fake_container", "fake_prefix", "xapi")
qs.queued_lines = ["fake"]
@@ -360,8 +361,10 @@ def test_required_dest_libcloud_keys(capsys):
captured = capsys.readouterr()
print(captured.out)
- assert "If not using the 'LRS' destination, the following keys must be defined in destination_config: " \
- "'prefix', 'container'" in captured.out
+ assert (
+ "If not using the 'LRS' destination, the following keys must be defined in destination_config: "
+ "'prefix', 'container'" in captured.out
+ )
def test_get_source_config():
@@ -371,7 +374,7 @@ def test_get_source_config():
options = {
"key": "fake test key",
"container": "fake container",
- "prefix": "fake prefix"
+ "prefix": "fake prefix",
}
config, container, prefix = get_source_config_from_options(json.dumps(options))
@@ -389,7 +392,7 @@ def test_get_dest_config():
options = {
"key": "fake test key",
"container": "fake container",
- "prefix": "fake prefix"
+ "prefix": "fake prefix",
}
config, container, prefix = get_dest_config_from_options("fake provider", json.dumps(options))
diff --git a/event_routing_backends/management/commands/transform_tracking_logs.py b/event_routing_backends/management/commands/transform_tracking_logs.py
index 4b8b7750..2fd2427b 100644
--- a/event_routing_backends/management/commands/transform_tracking_logs.py
+++ b/event_routing_backends/management/commands/transform_tracking_logs.py
@@ -1,6 +1,7 @@
"""
Management command for transforming tracking log files.
"""
+
import json
import os
from io import BytesIO
@@ -26,18 +27,14 @@ def _get_chunks(source, file):
tries to handle any of those cases gracefully.
"""
chunks = None
- num_retries = getattr(settings, 'EVENT_ROUTING_BACKEND_BULK_DOWNLOAD_MAX_RETRIES', 3)
- retry_countdown = getattr(settings, 'EVENT_ROUTING_BACKEND_BULK_DOWNLOAD_COUNTDOWN', 1)
+ num_retries = getattr(settings, "EVENT_ROUTING_BACKEND_BULK_DOWNLOAD_MAX_RETRIES", 3)
+ retry_countdown = getattr(settings, "EVENT_ROUTING_BACKEND_BULK_DOWNLOAD_COUNTDOWN", 1)
# Skipping coverage here because it wants to test a branch that will never
# be hit (for -> return)
- for try_number in range(1, num_retries+1): # pragma: no cover
+ for try_number in range(1, num_retries + 1): # pragma: no cover
try:
- chunks = source.download_object_range_as_stream(
- file,
- start_bytes=0,
- chunk_size=CHUNK_SIZE
- )
+ chunks = source.download_object_range_as_stream(file, start_bytes=0, chunk_size=CHUNK_SIZE)
break
# Catching all exceptions here because there's no telling what all
# the possible errors from different libcloud providers are.
@@ -52,12 +49,7 @@ def _get_chunks(source, file):
return chunks
-def transform_tracking_logs(
- source,
- source_container,
- source_prefix,
- sender
-):
+def transform_tracking_logs(source, source_container, source_prefix, sender):
"""
Transform one or more tracking log files from the given source to the given destination.
"""
@@ -77,7 +69,7 @@ def transform_tracking_logs(
chunks = _get_chunks(source, file)
for chunk in chunks:
- chunk = chunk.decode('utf-8')
+ chunk = chunk.decode("utf-8")
# Loop through this chunk, if we find a newline it's time to process
# otherwise just keep appending.
@@ -123,8 +115,10 @@ def get_dest_config_from_options(destination_provider, dest_config_options):
dest_container = dest_config.pop("container")
dest_prefix = dest_config.pop("prefix")
except KeyError as e:
- print("If not using the 'LRS' destination, the following keys must be defined in "
- "destination_config: 'prefix', 'container'")
+ print(
+ "If not using the 'LRS' destination, the following keys must be defined in "
+ "destination_config: 'prefix', 'container'"
+ )
raise e
else:
dest_config = dest_container = dest_prefix = None
@@ -151,11 +145,7 @@ def validate_destination(driver, container_name, prefix, source_objects):
container = driver.get_container(container_name)
full_path = f"{prefix}/manifest.log"
file_list = "\n".join(source_objects)
- driver.upload_object_via_stream(
- iterator=BytesIO(file_list.encode()),
- container=container,
- object_name=full_path
- )
+ driver.upload_object_via_stream(iterator=BytesIO(file_list.encode()), container=container, object_name=full_path)
print(f"Wrote source file list to '{container_name}/{full_path}'")
@@ -189,69 +179,70 @@ class Command(BaseCommand):
"""
Transform tracking logs to an LRS or other output destination.
"""
+
help = dedent(__doc__).strip()
def add_arguments(self, parser):
parser.add_argument(
- '--source_provider',
+ "--source_provider",
type=str,
help="An Apache Libcloud 'provider constant' from: "
- "https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html . "
- "Ex: LOCAL for local storage or S3 for AWS S3.",
+ "https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html . "
+ "Ex: LOCAL for local storage or S3 for AWS S3.",
required=True,
)
parser.add_argument(
- '--source_config',
+ "--source_config",
type=str,
help="A JSON dictionary of configuration for the source provider. Leave"
- "blank the destination_provider is 'LRS'. See the Libcloud docs for the necessary options"
- "for your destination. If your destination (S3, MinIO, etc) needs a 'bucket' or 'container' add them "
- "to the config here under the key 'container'. If your source needs a prefix (ex: directory path, "
- "or wildcard beginning of a filename), add it here under the key 'prefix'. If no prefix is given, "
- "all files in the given location will be attempted!",
+ "blank the destination_provider is 'LRS'. See the Libcloud docs for the necessary options"
+ "for your destination. If your destination (S3, MinIO, etc) needs a 'bucket' or 'container' add them "
+ "to the config here under the key 'container'. If your source needs a prefix (ex: directory path, "
+ "or wildcard beginning of a filename), add it here under the key 'prefix'. If no prefix is given, "
+ "all files in the given location will be attempted!",
required=True,
)
parser.add_argument(
- '--destination_provider',
+ "--destination_provider",
type=str,
default="LRS",
help="Either 'LRS' to use the default configured xAPI and/or Caliper servers"
- "or an Apache Libcloud 'provider constant' from this list: "
- "https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html . "
- "Ex: LOCAL for local storage or S3 for AWS S3.",
+ "or an Apache Libcloud 'provider constant' from this list: "
+ "https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html . "
+ "Ex: LOCAL for local storage or S3 for AWS S3.",
)
parser.add_argument(
- '--destination_config',
+ "--destination_config",
type=str,
help="A JSON dictionary of configuration for the destination provider. Not needed for the 'LRS' "
- "destination_provider. See the Libcloud docs for the necessary options for your destination. If your "
- "destination (S3, MinIO, etc) needs a 'bucket' or 'container' add them to the config here under the "
- "key 'container'. If your destination needs a prefix (ex: directory path), add it here under the key "
- "'prefix'. If no prefix is given, the output file(s) will be written to the base path.",
+ "destination_provider. See the Libcloud docs for the necessary options for your destination. If your "
+ "destination (S3, MinIO, etc) needs a 'bucket' or 'container' add them to the config here under the "
+ "key 'container'. If your destination needs a prefix (ex: directory path), add it here under the key "
+ "'prefix'. If no prefix is given, the output file(s) will be written to the base path.",
)
parser.add_argument(
- '--transformer_type',
+ "--transformer_type",
choices=["xapi", "caliper"],
required=True,
help="The type of transformation to do, only one can be done at a time.",
)
parser.add_argument(
- '--batch_size',
+ "--batch_size",
type=int,
default=10000,
help="How many events to send at a time. For the LRS destination this will be one POST per this many "
- "events, for all other destinations a new file will be created containing up to this many events. "
- "This helps reduce memory usage in the script and increases helps with LRS performance.",
+ "events, for all other destinations a new file will be created containing up to this many events. "
+ "This helps reduce memory usage in the script and increases helps with LRS performance.",
)
parser.add_argument(
- '--sleep_between_batches_secs',
+ "--sleep_between_batches_secs",
type=float,
default=10.0,
help="Fractional seconds to sleep between sending batches to a destination, used to reduce load on the LMS "
- "and LRSs when performing large operations.",
+ "and LRSs when performing large operations.",
)
parser.add_argument(
- '--dry_run',
+ "--dry_run",
action="store_true",
help="Attempt to transform all lines from all files, but do not send to the destination.",
)
@@ -262,22 +253,25 @@ def handle(self, *args, **options):
"""
source_config, source_container, source_prefix = get_source_config_from_options(options["source_config"])
dest_config, dest_container, dest_prefix = get_dest_config_from_options(
- options["destination_provider"],
- options["destination_config"]
+ options["destination_provider"], options["destination_config"]
)
source_driver, dest_driver = get_libcloud_drivers(
options["source_provider"],
source_config,
options["destination_provider"],
- dest_config
+ dest_config,
)
source_file_list = validate_source_and_files(source_driver, source_container, source_prefix)
if dest_driver != "LRS":
validate_destination(dest_driver, dest_container, dest_prefix, source_file_list)
else:
- print(f"Found {len(source_file_list)} source files: ", *source_file_list, sep="\n")
+ print(
+ f"Found {len(source_file_list)} source files: ",
+ *source_file_list,
+ sep="\n",
+ )
sender = QueuedSender(
dest_driver,
@@ -286,12 +280,7 @@ def handle(self, *args, **options):
options["transformer_type"],
max_queue_size=options["batch_size"],
sleep_between_batches_secs=options["sleep_between_batches_secs"],
- dry_run=options["dry_run"]
+ dry_run=options["dry_run"],
)
- transform_tracking_logs(
- source_driver,
- source_container,
- source_prefix,
- sender
- )
+ transform_tracking_logs(source_driver, source_container, source_prefix, sender)
diff --git a/event_routing_backends/migrations/0001_initial.py b/event_routing_backends/migrations/0001_initial.py
index 692fb1f8..aa685045 100644
--- a/event_routing_backends/migrations/0001_initial.py
+++ b/event_routing_backends/migrations/0001_initial.py
@@ -18,18 +18,54 @@ class Migration(migrations.Migration):
operations = [
migrations.CreateModel(
- name='RouterConfiguration',
+ name="RouterConfiguration",
fields=[
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
- ('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
- ('backend_name', models.CharField(help_text='Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.', max_length=50, verbose_name='Backend name')),
- ('configurations', event_routing_backends.utils.fields.EncryptedJSONField(dump_kwargs={'cls': jsonfield.encoder.JSONEncoder, 'separators': (',', ':')}, load_kwargs={})),
- ('changed_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Changed by')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "change_date",
+ models.DateTimeField(auto_now_add=True, verbose_name="Change date"),
+ ),
+ ("enabled", models.BooleanField(default=False, verbose_name="Enabled")),
+ (
+ "backend_name",
+ models.CharField(
+ help_text="Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.",
+ max_length=50,
+ verbose_name="Backend name",
+ ),
+ ),
+ (
+ "configurations",
+ event_routing_backends.utils.fields.EncryptedJSONField(
+ dump_kwargs={
+ "cls": jsonfield.encoder.JSONEncoder,
+ "separators": (",", ":"),
+ },
+ load_kwargs={},
+ ),
+ ),
+ (
+ "changed_by",
+ models.ForeignKey(
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ to=settings.AUTH_USER_MODEL,
+ verbose_name="Changed by",
+ ),
+ ),
],
options={
- 'verbose_name': 'Router Configuration',
- 'verbose_name_plural': 'Router Configurations',
+ "verbose_name": "Router Configuration",
+ "verbose_name_plural": "Router Configurations",
},
),
]
diff --git a/event_routing_backends/migrations/0002_auto_20210503_0648.py b/event_routing_backends/migrations/0002_auto_20210503_0648.py
index f7bef6e4..8348d463 100644
--- a/event_routing_backends/migrations/0002_auto_20210503_0648.py
+++ b/event_routing_backends/migrations/0002_auto_20210503_0648.py
@@ -6,19 +6,29 @@
class Migration(migrations.Migration):
dependencies = [
- ('event_routing_backends', '0001_initial'),
+ ("event_routing_backends", "0001_initial"),
]
operations = [
migrations.AddField(
- model_name='routerconfiguration',
- name='route_url',
- field=models.CharField(default='', help_text='Route Url of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.', max_length=255, verbose_name='Route url'),
+ model_name="routerconfiguration",
+ name="route_url",
+ field=models.CharField(
+ default="",
+ help_text="Route Url of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.",
+ max_length=255,
+ verbose_name="Route url",
+ ),
preserve_default=False,
),
migrations.AlterField(
- model_name='routerconfiguration',
- name='backend_name',
- field=models.CharField(db_index=True, help_text='Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.', max_length=50, verbose_name='Backend name'),
+ model_name="routerconfiguration",
+ name="backend_name",
+ field=models.CharField(
+ db_index=True,
+ help_text="Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.",
+ max_length=50,
+ verbose_name="Backend name",
+ ),
),
]
diff --git a/event_routing_backends/migrations/0003_auto_20210713_0344.py b/event_routing_backends/migrations/0003_auto_20210713_0344.py
index 64df6acc..e13a701d 100644
--- a/event_routing_backends/migrations/0003_auto_20210713_0344.py
+++ b/event_routing_backends/migrations/0003_auto_20210713_0344.py
@@ -8,13 +8,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('event_routing_backends', '0002_auto_20210503_0648'),
+ ("event_routing_backends", "0002_auto_20210503_0648"),
]
operations = [
migrations.AlterField(
- model_name='routerconfiguration',
- name='configurations',
+ model_name="routerconfiguration",
+ name="configurations",
field=event_routing_backends.utils.fields.EncryptedJSONField(),
),
]
diff --git a/event_routing_backends/migrations/0004_auto_20211025_1053.py b/event_routing_backends/migrations/0004_auto_20211025_1053.py
index 8f6384c5..18b4c3e9 100644
--- a/event_routing_backends/migrations/0004_auto_20211025_1053.py
+++ b/event_routing_backends/migrations/0004_auto_20211025_1053.py
@@ -9,38 +9,57 @@
class Migration(migrations.Migration):
dependencies = [
- ('event_routing_backends', '0003_auto_20210713_0344'),
+ ("event_routing_backends", "0003_auto_20210713_0344"),
]
operations = [
migrations.AddField(
- model_name='routerconfiguration',
- name='auth_key',
- field=fernet_fields.fields.EncryptedCharField(blank=True, max_length=256, null=True, verbose_name='Auth Key'),
+ model_name="routerconfiguration",
+ name="auth_key",
+ field=fernet_fields.fields.EncryptedCharField(
+ blank=True, max_length=256, null=True, verbose_name="Auth Key"
+ ),
),
migrations.AddField(
- model_name='routerconfiguration',
- name='auth_scheme',
- field=models.CharField(blank=True, choices=[('Basic', 'Basic'), ('Bearer', 'Bearer')], default=None, max_length=6, null=True, verbose_name='Auth Scheme'),
+ model_name="routerconfiguration",
+ name="auth_scheme",
+ field=models.CharField(
+ blank=True,
+ choices=[("Basic", "Basic"), ("Bearer", "Bearer")],
+ default=None,
+ max_length=6,
+ null=True,
+ verbose_name="Auth Scheme",
+ ),
),
migrations.AddField(
- model_name='routerconfiguration',
- name='password',
- field=fernet_fields.fields.EncryptedCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
+ model_name="routerconfiguration",
+ name="password",
+ field=fernet_fields.fields.EncryptedCharField(
+ blank=True, max_length=256, null=True, verbose_name="Password"
+ ),
),
migrations.AddField(
- model_name='routerconfiguration',
- name='username',
- field=fernet_fields.fields.EncryptedCharField(blank=True, max_length=256, null=True, verbose_name='Username'),
+ model_name="routerconfiguration",
+ name="username",
+ field=fernet_fields.fields.EncryptedCharField(
+ blank=True, max_length=256, null=True, verbose_name="Username"
+ ),
),
migrations.AlterField(
- model_name='routerconfiguration',
- name='backend_name',
- field=models.CharField(choices=[('Caliper', 'Caliper'), ('xAPI', 'xAPI')], db_index=True, default='xAPI', help_text='Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.', max_length=50),
+ model_name="routerconfiguration",
+ name="backend_name",
+ field=models.CharField(
+ choices=[("Caliper", "Caliper"), ("xAPI", "xAPI")],
+ db_index=True,
+ default="xAPI",
+ help_text="Name of the tracking backend on which this router should be applied.<br/><br/>Please note that this field is case sensitive.",
+ max_length=50,
+ ),
),
migrations.AlterField(
- model_name='routerconfiguration',
- name='configurations',
+ model_name="routerconfiguration",
+ name="configurations",
field=event_routing_backends.utils.fields.EncryptedJSONField(blank=True, default=None),
),
]
diff --git a/event_routing_backends/models.py b/event_routing_backends/models.py
index cc86c1d0..98677b94 100644
--- a/event_routing_backends/models.py
+++ b/event_routing_backends/models.py
@@ -1,7 +1,7 @@
-
"""
Database models for event_routing_backends.
"""
+
import logging
import re
@@ -39,7 +39,7 @@ def get_value_from_dotted_path(dict_obj, dotted_key):
ANY : Returns the value found in the dict or `None` if
no value exists for provided dotted path.
"""
- nested_keys = dotted_key.split('.')
+ nested_keys = dotted_key.split(".")
result = dict_obj
try:
for key in nested_keys:
@@ -69,7 +69,7 @@ def get_routers(self, backend_name):
if cached_response.is_found:
return cached_response.value
- current = self.current_set().filter(backend_name=backend_name, enabled=True).order_by('-change_date')
+ current = self.current_set().filter(backend_name=backend_name, enabled=True).order_by("-change_date")
TieredCache.set_all_tiers(cache_key, current, backend_cache_ttl())
return current
@@ -114,13 +114,19 @@ class RouterConfiguration(ConfigurationModel):
"""
- AUTH_BASIC = 'Basic'
- AUTH_BEARER = 'Bearer'
- AUTH_CHOICES = ((AUTH_BASIC, 'Basic'), (AUTH_BEARER, 'Bearer'),)
- CALIPER_BACKEND = 'Caliper'
- XAPI_BACKEND = 'xAPI'
- BACKEND_CHOICES = ((CALIPER_BACKEND, 'Caliper'), (XAPI_BACKEND, 'xAPI'),)
- KEY_FIELDS = ('route_url',)
+ AUTH_BASIC = "Basic"
+ AUTH_BEARER = "Bearer"
+ AUTH_CHOICES = (
+ (AUTH_BASIC, "Basic"),
+ (AUTH_BEARER, "Bearer"),
+ )
+ CALIPER_BACKEND = "Caliper"
+ XAPI_BACKEND = "xAPI"
+ BACKEND_CHOICES = (
+ (CALIPER_BACKEND, "Caliper"),
+ (XAPI_BACKEND, "xAPI"),
+ )
+ KEY_FIELDS = ("route_url",)
backend_name = models.CharField(
choices=BACKEND_CHOICES,
max_length=50,
@@ -129,50 +135,35 @@ class RouterConfiguration(ConfigurationModel):
db_index=True,
default=XAPI_BACKEND,
help_text=(
- 'Name of the tracking backend on which this router should be applied.'
- '<br/><br/>'
- 'Please note that this field is case sensitive.'
- )
+ "Name of the tracking backend on which this router should be applied."
+ "<br/><br/>"
+ "Please note that this field is case sensitive."
+ ),
)
route_url = models.CharField(
max_length=255,
- verbose_name='Route url',
+ verbose_name="Route url",
null=False,
blank=False,
help_text=(
- 'Route Url of the tracking backend on which this router should be applied.'
- '<br/><br/>'
- 'Please note that this field is case sensitive.'
- )
+ "Route Url of the tracking backend on which this router should be applied."
+ "<br/><br/>"
+ "Please note that this field is case sensitive."
+ ),
)
auth_scheme = models.CharField(
choices=AUTH_CHOICES,
- verbose_name='Auth Scheme',
+ verbose_name="Auth Scheme",
max_length=6,
default=None,
blank=True,
- null=True
- )
- auth_key = EncryptedCharField(
- verbose_name='Auth Key',
- max_length=256,
- blank=True,
- null=True
- )
- username = EncryptedCharField(
- verbose_name='Username',
- max_length=256,
- blank=True,
- null=True
- )
- password = EncryptedCharField(
- verbose_name='Password',
- max_length=256,
- blank=True,
- null=True
+ null=True,
)
+ auth_key = EncryptedCharField(verbose_name="Auth Key", max_length=256, blank=True, null=True)
+ username = EncryptedCharField(verbose_name="Username", max_length=256, blank=True, null=True)
+ password = EncryptedCharField(verbose_name="Password", max_length=256, blank=True, null=True)
configurations = EncryptedJSONField(blank=True, default=None)
objects = RouterConfigurationManager()
@@ -181,17 +172,17 @@ class Meta:
Addition of class names.
"""
- verbose_name = 'Router Configuration'
- verbose_name_plural = 'Router Configurations'
+ verbose_name = "Router Configuration"
+ verbose_name_plural = "Router Configurations"
def __str__(self):
"""
Return string representation for class instance.
"""
- return '{id} - {backend} - {enabled}'.format(
+ return "{id} - {backend} - {enabled}".format(
id=self.pk,
backend=self.backend_name,
- enabled='Enabled' if self.enabled else 'Disabled'
+ enabled="Enabled" if self.enabled else "Disabled",
)
@classmethod
@@ -255,7 +246,7 @@ def get_allowed_host(self, original_event):
dict
"""
if not self.configurations:
- return {'host_configurations': {}}
+ return {"host_configurations": {}}
is_allowed = self._match_event_for_host(original_event, self.configurations)
@@ -275,7 +266,7 @@ def _match_event_for_host(self, original_event, host_config):
Returns:
bool
"""
- for key, value in host_config.get('match_params', {}).items():
+ for key, value in host_config.get("match_params", {}).items():
original_event_value = get_value_from_dotted_path(original_event, key)
if isinstance(value, list):
matched = False
@@ -302,7 +293,5 @@ def _is_match(self, regex_exp, value_str):
try:
return bool(re.compile(str(regex_exp))) and re.search(regex_exp, value_str)
except TypeError as err:
- logger.info(
- 'Invalid regex %s with error: %s', regex_exp, err
- )
+ logger.info("Invalid regex %s with error: %s", regex_exp, err)
return False
diff --git a/event_routing_backends/processors/caliper/__init__.py b/event_routing_backends/processors/caliper/__init__.py
index 4f34d7ba..bea04203 100644
--- a/event_routing_backends/processors/caliper/__init__.py
+++ b/event_routing_backends/processors/caliper/__init__.py
@@ -2,7 +2,6 @@
Caliper processors and spec implementation.
"""
-
from edx_toggles.toggles import SettingToggle
# .. toggle_name: CALIPER_EVENTS_ENABLED
diff --git a/event_routing_backends/processors/caliper/constants.py b/event_routing_backends/processors/caliper/constants.py
index 9e41541b..3608537b 100644
--- a/event_routing_backends/processors/caliper/constants.py
+++ b/event_routing_backends/processors/caliper/constants.py
@@ -2,4 +2,4 @@
Constants related to IMS Caliper and events transformation into Caliper.
"""
-CALIPER_EVENT_CONTEXT = 'http://purl.imsglobal.org/ctx/caliper/v1p2'
+CALIPER_EVENT_CONTEXT = "http://purl.imsglobal.org/ctx/caliper/v1p2"
diff --git a/event_routing_backends/processors/caliper/envelope_processor.py b/event_routing_backends/processors/caliper/envelope_processor.py
index 4d1f2819..d93d063a 100644
--- a/event_routing_backends/processors/caliper/envelope_processor.py
+++ b/event_routing_backends/processors/caliper/envelope_processor.py
@@ -1,6 +1,7 @@
"""
Envelope the caliper transformed event.
"""
+
from datetime import datetime
from pytz import UTC
@@ -13,6 +14,7 @@ class CaliperEnvelopeProcessor:
"""
Envelope the caliper transformed event.
"""
+
def __init__(self, sensor_id):
"""
Initialize the processor.
@@ -31,10 +33,12 @@ def __call__(self, events):
"""
enveloped_events = []
for event in events:
- enveloped_events.append({
- 'sensor': self.sensor_id,
- 'sendTime': convert_datetime_to_iso(datetime.now(UTC)),
- 'data': [event],
- 'dataVersion': CALIPER_EVENT_CONTEXT
- })
+ enveloped_events.append(
+ {
+ "sensor": self.sensor_id,
+ "sendTime": convert_datetime_to_iso(datetime.now(UTC)),
+ "data": [event],
+ "dataVersion": CALIPER_EVENT_CONTEXT,
+ }
+ )
return enveloped_events
diff --git a/event_routing_backends/processors/caliper/event_transformers/__init__.py b/event_routing_backends/processors/caliper/event_transformers/__init__.py
index 94212c7f..de2d1a9e 100644
--- a/event_routing_backends/processors/caliper/event_transformers/__init__.py
+++ b/event_routing_backends/processors/caliper/event_transformers/__init__.py
@@ -1,6 +1,7 @@
"""
Contains all available caliper transformers
"""
+
from event_routing_backends.processors.caliper.event_transformers.enrollment_events import EnrollmentEventTransformers
from event_routing_backends.processors.caliper.event_transformers.navigation_events import NavigationEventsTransformers
from event_routing_backends.processors.caliper.event_transformers.problem_interaction_events import (
diff --git a/event_routing_backends/processors/caliper/event_transformers/enrollment_events.py b/event_routing_backends/processors/caliper/event_transformers/enrollment_events.py
index 74c27a55..f5fa30cd 100644
--- a/event_routing_backends/processors/caliper/event_transformers/enrollment_events.py
+++ b/event_routing_backends/processors/caliper/event_transformers/enrollment_events.py
@@ -7,9 +7,9 @@
from event_routing_backends.processors.caliper.transformer import CaliperTransformer
-@CaliperTransformersRegistry.register('edx.course.enrollment.activated')
-@CaliperTransformersRegistry.register('edx.course.enrollment.deactivated')
-@CaliperTransformersRegistry.register('edx.course.grade.passed.first_time')
+@CaliperTransformersRegistry.register("edx.course.enrollment.activated")
+@CaliperTransformersRegistry.register("edx.course.enrollment.deactivated")
+@CaliperTransformersRegistry.register("edx.course.grade.passed.first_time")
class EnrollmentEventTransformers(CaliperTransformer):
"""
This transformer transforms three events:
@@ -23,7 +23,7 @@ class EnrollmentEventTransformers(CaliperTransformer):
Generated when a user complete a course.
"""
- type = 'Event'
+ type = "Event"
def get_action(self):
"""
@@ -32,11 +32,11 @@ def get_action(self):
Returns:
str
"""
- if self.get_data('name', True) == 'edx.course.enrollment.activated':
- return 'Activated'
- if self.get_data('name', True) == 'edx.course.grade.passed.first_time':
- return 'Completed'
- return 'Deactivated'
+ if self.get_data("name", True) == "edx.course.enrollment.activated":
+ return "Activated"
+ if self.get_data("name", True) == "edx.course.grade.passed.first_time":
+ return "Completed"
+ return "Deactivated"
def get_object(self):
"""
@@ -45,16 +45,20 @@ def get_object(self):
Returns:
dict
"""
- self.backend_name = 'caliper'
- course = get_course_from_id(self.get_data('context.course_id'))
+ self.backend_name = "caliper"
+ course = get_course_from_id(self.get_data("context.course_id"))
# TODO: replace with anonymous enrollment id?
- course_root_url = self.get_object_iri('course', self.get_data('data.course_id', True))
+ course_root_url = self.get_object_iri("course", self.get_data("data.course_id", True))
caliper_object = super().get_object()
- caliper_object.update({
- 'id': course_root_url,
- 'type': 'CourseOffering',
- 'name': course['display_name'],
- 'extensions': {'mode': self.get_data('data.mode')} if self.get_data('data.mode') is not None else None,
- })
+ caliper_object.update(
+ {
+ "id": course_root_url,
+ "type": "CourseOffering",
+ "name": course["display_name"],
+ "extensions": (
+ {"mode": self.get_data("data.mode")} if self.get_data("data.mode") is not None else None
+ ),
+ }
+ )
return caliper_object
diff --git a/event_routing_backends/processors/caliper/event_transformers/navigation_events.py b/event_routing_backends/processors/caliper/event_transformers/navigation_events.py
index 50171dc8..1c8bef49 100644
--- a/event_routing_backends/processors/caliper/event_transformers/navigation_events.py
+++ b/event_routing_backends/processors/caliper/event_transformers/navigation_events.py
@@ -1,25 +1,26 @@
"""
Transformers for navigation related events.
"""
+
from event_routing_backends.processors.caliper.registry import CaliperTransformersRegistry
from event_routing_backends.processors.caliper.transformer import CaliperTransformer
OBJECT_TYPE_MAP = {
- 'edx.ui.lms.sequence.next_selected': 'DigitalResourceCollection',
- 'edx.ui.lms.sequence.previous_selected': 'DigitalResourceCollection',
- 'edx.ui.lms.sequence.tab_selected': 'DigitalResourceCollection',
- 'edx.ui.lms.link_clicked': 'Webpage',
- 'edx.ui.lms.sequence.outline.selected': 'DigitalResource',
- 'edx.ui.lms.outline.selected': 'DigitalResource'
+ "edx.ui.lms.sequence.next_selected": "DigitalResourceCollection",
+ "edx.ui.lms.sequence.previous_selected": "DigitalResourceCollection",
+ "edx.ui.lms.sequence.tab_selected": "DigitalResourceCollection",
+ "edx.ui.lms.link_clicked": "Webpage",
+ "edx.ui.lms.sequence.outline.selected": "DigitalResource",
+ "edx.ui.lms.outline.selected": "DigitalResource",
}
-@CaliperTransformersRegistry.register('edx.ui.lms.sequence.next_selected')
-@CaliperTransformersRegistry.register('edx.ui.lms.sequence.previous_selected')
-@CaliperTransformersRegistry.register('edx.ui.lms.sequence.tab_selected')
-@CaliperTransformersRegistry.register('edx.ui.lms.link_clicked')
-@CaliperTransformersRegistry.register('edx.ui.lms.sequence.outline.selected')
-@CaliperTransformersRegistry.register('edx.ui.lms.outline.selected')
+@CaliperTransformersRegistry.register("edx.ui.lms.sequence.next_selected")
+@CaliperTransformersRegistry.register("edx.ui.lms.sequence.previous_selected")
+@CaliperTransformersRegistry.register("edx.ui.lms.sequence.tab_selected")
+@CaliperTransformersRegistry.register("edx.ui.lms.link_clicked")
+@CaliperTransformersRegistry.register("edx.ui.lms.sequence.outline.selected")
+@CaliperTransformersRegistry.register("edx.ui.lms.outline.selected")
class NavigationEventsTransformers(CaliperTransformer):
"""
These events are generated when the user navigates through
@@ -28,8 +29,9 @@ class NavigationEventsTransformers(CaliperTransformer):
"edx.ui.lms.sequence.outline.selected" and "edx.ui.lms.outline.selected" are
actually same events.
"""
- action = 'NavigatedTo'
- type = 'NavigationEvent'
+
+ action = "NavigatedTo"
+ type = "NavigationEvent"
def get_object(self):
"""
@@ -38,45 +40,44 @@ def get_object(self):
Returns:
dict
"""
- self.backend_name = 'caliper'
+ self.backend_name = "caliper"
caliper_object = super().get_object()
- data = self.get_data('data')
+ data = self.get_data("data")
extensions = {}
- event_name = self.get_data('name', True)
- if event_name in (
- 'edx.ui.lms.link_clicked',
- 'edx.ui.lms.outline.selected'
- ):
- object_id = self.get_data('data.target_url', True)
- object_name = self.get_data('data.target_name')
+ event_name = self.get_data("name", True)
+ if event_name in ("edx.ui.lms.link_clicked", "edx.ui.lms.outline.selected"):
+ object_id = self.get_data("data.target_url", True)
+ object_name = self.get_data("data.target_name")
else:
- object_id = self.get_object_iri('xblock', self.get_data('data.id', True))
- object_name = 'Unit'
- data.pop('id')
- extensions['tab_count'] = self.get_data('data.tab_count')
- extensions['current_tab'] = self.get_data('data.current_tab')
- if event_name == 'edx.ui.lms.sequence.next_selected':
- extensions['target'] = "next unit"
- elif event_name == 'edx.ui.lms.sequence.previous_selected':
- extensions['target'] = 'previous unit'
+ object_id = self.get_object_iri("xblock", self.get_data("data.id", True))
+ object_name = "Unit"
+ data.pop("id")
+ extensions["tab_count"] = self.get_data("data.tab_count")
+ extensions["current_tab"] = self.get_data("data.current_tab")
+ if event_name == "edx.ui.lms.sequence.next_selected":
+ extensions["target"] = "next unit"
+ elif event_name == "edx.ui.lms.sequence.previous_selected":
+ extensions["target"] = "previous unit"
else:
- extensions['target'] = self.get_data('data.target_tab')
+ extensions["target"] = self.get_data("data.target_tab")
- caliper_object.update({
- 'id': object_id,
- 'type': OBJECT_TYPE_MAP.get(event_name, 'Webpage'),
- 'name': object_name
- })
+ caliper_object.update(
+ {
+ "id": object_id,
+ "type": OBJECT_TYPE_MAP.get(event_name, "Webpage"),
+ "name": object_name,
+ }
+ )
- caliper_object.pop('extensions', None)
- course_id = self.get_data('context.course_id')
+ caliper_object.pop("extensions", None)
+ course_id = self.get_data("context.course_id")
if course_id:
- extensions['isPartOf'] = {}
- extensions['isPartOf']['id'] = self.get_object_iri('course', course_id)
- extensions['isPartOf']['type'] = 'CourseOffering'
+ extensions["isPartOf"] = {}
+ extensions["isPartOf"]["id"] = self.get_object_iri("course", course_id)
+ extensions["isPartOf"]["type"] = "CourseOffering"
if extensions:
- caliper_object.update({'extensions': extensions})
+ caliper_object.update({"extensions": extensions})
return caliper_object
diff --git a/event_routing_backends/processors/caliper/event_transformers/problem_interaction_events.py b/event_routing_backends/processors/caliper/event_transformers/problem_interaction_events.py
index 23e007f5..f0e6139d 100644
--- a/event_routing_backends/processors/caliper/event_transformers/problem_interaction_events.py
+++ b/event_routing_backends/processors/caliper/event_transformers/problem_interaction_events.py
@@ -1,56 +1,57 @@
"""
Transformers for problem interaction events.
"""
+
from event_routing_backends.helpers import get_anonymous_user_id, get_problem_block_id
from event_routing_backends.processors.caliper.registry import CaliperTransformersRegistry
from event_routing_backends.processors.caliper.transformer import CaliperTransformer
EVENT_ACTION_MAP = {
- 'problem_check': 'Submitted',
- 'problem_check_server': 'Graded',
- 'edx.grades.problem.submitted': 'Submitted',
- 'showanswer': 'Viewed',
- 'problem_show': 'Viewed',
- 'edx.problem.hint.demandhint_displayed': 'Viewed',
- 'edx.problem.completed': 'Completed'
+ "problem_check": "Submitted",
+ "problem_check_server": "Graded",
+ "edx.grades.problem.submitted": "Submitted",
+ "showanswer": "Viewed",
+ "problem_show": "Viewed",
+ "edx.problem.hint.demandhint_displayed": "Viewed",
+ "edx.problem.completed": "Completed",
}
OBJECT_TYPE_MAP = {
- 'problem_check': 'Assessment',
- 'problem_check_server': 'Attempt',
- 'edx.grades.problem.submitted': 'Assessment',
- 'showanswer': 'Annotation',
- 'problem_show': 'Frame',
- 'edx.problem.hint.demandhint_displayed': 'Annotation',
- 'edx.problem.completed': 'AssessmentItem'
+ "problem_check": "Assessment",
+ "problem_check_server": "Attempt",
+ "edx.grades.problem.submitted": "Assessment",
+ "showanswer": "Annotation",
+ "problem_show": "Frame",
+ "edx.problem.hint.demandhint_displayed": "Annotation",
+ "edx.problem.completed": "AssessmentItem",
}
OBJECT_NAME_MAP = {
- 'problem_check': None,
- 'problem_check_server': None,
- 'edx.grades.problem.submitted': None,
- 'showanswer': 'Solution',
- 'problem_show': None,
- 'edx.problem.hint.demandhint_displayed': 'Hint',
- 'edx.problem.completed': None
+ "problem_check": None,
+ "problem_check_server": None,
+ "edx.grades.problem.submitted": None,
+ "showanswer": "Solution",
+ "problem_show": None,
+ "edx.problem.hint.demandhint_displayed": "Hint",
+ "edx.problem.completed": None,
}
EVENT_TYPE_MAP = {
- 'problem_check': 'AssessmentEvent',
- 'problem_check_server': 'GradeEvent',
- 'edx.grades.problem.submitted': 'AssessmentEvent',
- 'showanswer': 'Event',
- 'problem_show': 'ViewEvent',
- 'edx.problem.hint.demandhint_displayed': 'Event',
- 'edx.problem.completed': 'AssessmentItemEvent'
+ "problem_check": "AssessmentEvent",
+ "problem_check_server": "GradeEvent",
+ "edx.grades.problem.submitted": "AssessmentEvent",
+ "showanswer": "Event",
+ "problem_show": "ViewEvent",
+ "edx.problem.hint.demandhint_displayed": "Event",
+ "edx.problem.completed": "AssessmentItemEvent",
}
-@CaliperTransformersRegistry.register('problem_check')
-@CaliperTransformersRegistry.register('edx.grades.problem.submitted')
-@CaliperTransformersRegistry.register('showanswer')
-@CaliperTransformersRegistry.register('edx.problem.hint.demandhint_displayed')
-@CaliperTransformersRegistry.register('edx.problem.completed')
+@CaliperTransformersRegistry.register("problem_check")
+@CaliperTransformersRegistry.register("edx.grades.problem.submitted")
+@CaliperTransformersRegistry.register("showanswer")
+@CaliperTransformersRegistry.register("edx.problem.hint.demandhint_displayed")
+@CaliperTransformersRegistry.register("edx.problem.completed")
class ProblemEventsTransformers(CaliperTransformer):
"""
Transform problem interaction related events into caliper format.
@@ -59,7 +60,8 @@ class ProblemEventsTransformers(CaliperTransformer):
will be added in future as per the mapping document:
https://docs.google.com/spreadsheets/u/1/d/1z_1IGFVDF-wZToKS2EGXFR3s0NXoh6tTKhEtDkevFEM/edit?usp=sharing.
"""
- additional_fields = ('generated',)
+
+ additional_fields = ("generated",)
def get_event_name_key(self):
"""
@@ -68,9 +70,9 @@ def get_event_name_key(self):
Returns:
str
"""
- key = self.get_data('name', True)
- if key == 'problem_check' and self.get_data('context.event_source') == 'server':
- key = 'problem_check_server'
+ key = self.get_data("name", True)
+ if key == "problem_check" and self.get_data("context.event_source") == "server":
+ key = "problem_check_server"
return key
@@ -78,15 +80,15 @@ def get_generated(self):
"""
Add all generated information related to `scores`.
"""
- if self.get_event_name_key() == 'problem_check_server':
+ if self.get_event_name_key() == "problem_check_server":
return {
- 'score': {
- 'id': '_:score',
- 'type': 'Score',
- 'maxScore': self.get_data('max_grade'),
- 'scoreGiven': self.get_data('grade'),
- 'attempts': self.get_data('attempts'),
- 'extensions': {'success': "True" if self.get_data('success') == 'correct' else "False"},
+ "score": {
+ "id": "_:score",
+ "type": "Score",
+ "maxScore": self.get_data("max_grade"),
+ "scoreGiven": self.get_data("grade"),
+ "attempts": self.get_data("attempts"),
+ "extensions": {"success": ("True" if self.get_data("success") == "correct" else "False")},
}
}
@@ -119,55 +121,50 @@ def get_object(self):
Returns:
dict
"""
- self.backend_name = 'caliper'
+ self.backend_name = "caliper"
object_id = None
event_data = None
- data = self.get_data('data')
+ data = self.get_data("data")
if data and isinstance(data, dict):
event_data = data
- object_id = event_data.get('problem_id', event_data.get('module_id', None))
+ object_id = event_data.get("problem_id", event_data.get("module_id", None))
if not object_id:
object_id = get_problem_block_id(
- self.get_data('context.referer', True),
- self.get_data('data'),
- self.get_data('context.course_id')
+ self.get_data("context.referer", True),
+ self.get_data("data"),
+ self.get_data("context.course_id"),
)
key = self.get_event_name_key()
- anonymous_user_id = get_anonymous_user_id(self.extract_username_or_userid(), 'CALIPER')
- if key == 'showanswer':
- iri_url = '{}/solution'.format(object_id)
- elif key == 'edx.problem.hint.demandhint_displayed':
- iri_url = '{}/hint/{}'.format(object_id, event_data.get('hint_index', ''))
- elif key == 'problem_check_server':
- iri_url = '{}/user/{}/attempt/{}'.format(
- object_id,
- anonymous_user_id,
- str(event_data.get('attempts', ''))
- )
+ anonymous_user_id = get_anonymous_user_id(self.extract_username_or_userid(), "CALIPER")
+ if key == "showanswer":
+ iri_url = "{}/solution".format(object_id)
+ elif key == "edx.problem.hint.demandhint_displayed":
+ iri_url = "{}/hint/{}".format(object_id, event_data.get("hint_index", ""))
+ elif key == "problem_check_server":
+ iri_url = "{}/user/{}/attempt/{}".format(object_id, anonymous_user_id, str(event_data.get("attempts", "")))
else:
iri_url = object_id
caliper_object = super().get_object()
- caliper_object.update({
- 'id': self.get_object_iri('xblock', iri_url),
- 'type': OBJECT_TYPE_MAP.get(key, 'Attempt'),
- 'name': OBJECT_NAME_MAP.get(key, None),
- })
-
- if key == 'problem_check_server':
- extensions = caliper_object['extensions']
- extensions['assignee'] = {}
- extensions['assignee']['id'] = self.get_object_iri(
- 'user',
- anonymous_user_id
- )
- extensions['assignee']['type'] = 'Person'
- extensions['assignable'] = {}
- extensions['assignable']['id'] = self.get_object_iri('xblock', object_id)
- extensions['assignable']['type'] = 'Assessment'
- extensions['count'] = event_data.get('attempts', '')
- caliper_object['extensions'].update(extensions)
+ caliper_object.update(
+ {
+ "id": self.get_object_iri("xblock", iri_url),
+ "type": OBJECT_TYPE_MAP.get(key, "Attempt"),
+ "name": OBJECT_NAME_MAP.get(key, None),
+ }
+ )
+
+ if key == "problem_check_server":
+ extensions = caliper_object["extensions"]
+ extensions["assignee"] = {}
+ extensions["assignee"]["id"] = self.get_object_iri("user", anonymous_user_id)
+ extensions["assignee"]["type"] = "Person"
+ extensions["assignable"] = {}
+ extensions["assignable"]["id"] = self.get_object_iri("xblock", object_id)
+ extensions["assignable"]["type"] = "Assessment"
+ extensions["count"] = event_data.get("attempts", "")
+ caliper_object["extensions"].update(extensions)
return caliper_object
diff --git a/event_routing_backends/processors/caliper/event_transformers/video_events.py b/event_routing_backends/processors/caliper/event_transformers/video_events.py
index 87bc3516..425eed40 100644
--- a/event_routing_backends/processors/caliper/event_transformers/video_events.py
+++ b/event_routing_backends/processors/caliper/event_transformers/video_events.py
@@ -21,40 +21,33 @@
- edx.video.position.changed
- edx.video.completed (proposed)
"""
+
from event_routing_backends.helpers import convert_seconds_to_iso, make_video_block_id
from event_routing_backends.processors.caliper.registry import CaliperTransformersRegistry
from event_routing_backends.processors.caliper.transformer import CaliperTransformer
EVENTS_ACTION_MAP = {
- 'load_video': 'Started',
- 'edx.video.loaded': 'Started',
-
- 'play_video': 'Resumed',
- 'edx.video.played': 'Resumed',
-
- 'stop_video': 'Ended',
- 'edx.video.stopped': 'Ended',
-
- 'complete_video': 'Ended',
- 'edx.video.completed': 'Ended',
-
- 'pause_video': 'Paused',
- 'edx.video.paused': 'Paused',
-
- 'seek_video': 'JumpedTo',
- 'edx.video.position.changed': 'JumpedTo',
-
- 'hide_transcript': 'DisabledClosedCaptioning',
- 'edx.video.transcript.hidden': 'DisabledClosedCaptioning',
- 'edx.video.closed_captions.hidden': 'DisabledClosedCaptioning',
- 'video_hide_cc_menu': 'DisabledClosedCaptioning',
-
- 'show_transcript': 'EnabledClosedCaptioning',
- 'edx.video.transcript.shown': 'EnabledClosedCaptioning',
- 'edx.video.closed_captions.shown': 'EnabledClosedCaptioning',
- 'video_show_cc_menu': 'EnabledClosedCaptioning',
-
- 'speed_change_video': 'ChangedSpeed',
+ "load_video": "Started",
+ "edx.video.loaded": "Started",
+ "play_video": "Resumed",
+ "edx.video.played": "Resumed",
+ "stop_video": "Ended",
+ "edx.video.stopped": "Ended",
+ "complete_video": "Ended",
+ "edx.video.completed": "Ended",
+ "pause_video": "Paused",
+ "edx.video.paused": "Paused",
+ "seek_video": "JumpedTo",
+ "edx.video.position.changed": "JumpedTo",
+ "hide_transcript": "DisabledClosedCaptioning",
+ "edx.video.transcript.hidden": "DisabledClosedCaptioning",
+ "edx.video.closed_captions.hidden": "DisabledClosedCaptioning",
+ "video_hide_cc_menu": "DisabledClosedCaptioning",
+ "show_transcript": "EnabledClosedCaptioning",
+ "edx.video.transcript.shown": "EnabledClosedCaptioning",
+ "edx.video.closed_captions.shown": "EnabledClosedCaptioning",
+ "video_show_cc_menu": "EnabledClosedCaptioning",
+ "speed_change_video": "ChangedSpeed",
}
@@ -62,8 +55,9 @@ class BaseVideoTransformer(CaliperTransformer):
"""
Base transformer for video interaction events.
"""
- type = 'MediaEvent'
- additional_fields = ('target',)
+
+ type = "MediaEvent"
+ additional_fields = ("target",)
def get_action(self):
"""
@@ -72,7 +66,7 @@ def get_action(self):
Returns:
str
"""
- return EVENTS_ACTION_MAP[self.get_data('name', True)]
+ return EVENTS_ACTION_MAP[self.get_data("name", True)]
def get_object(self):
"""
@@ -81,20 +75,20 @@ def get_object(self):
Returns:
dict
"""
- self.backend_name = 'caliper'
+ self.backend_name = "caliper"
caliper_object = super().get_object()
- data = self.get_data('data')
- course_id = self.get_data('context.course_id', True)
- video_id = self.get_data('data.id', True)
+ data = self.get_data("data")
+ course_id = self.get_data("context.course_id", True)
+ video_id = self.get_data("data.id", True)
object_id = make_video_block_id(course_id=course_id, video_id=video_id)
- caliper_object.update({
- 'id': self.get_object_iri('xblock', object_id),
- 'type': 'VideoObject',
- 'duration': convert_seconds_to_iso(
- seconds=data.get('duration', 0)
- )
- })
+ caliper_object.update(
+ {
+ "id": self.get_object_iri("xblock", object_id),
+ "type": "VideoObject",
+ "duration": convert_seconds_to_iso(seconds=data.get("duration", 0)),
+ }
+ )
return caliper_object
@@ -107,42 +101,42 @@ def get_target(self):
"""
current_time = convert_seconds_to_iso(
- seconds=self.get_data('data.currentTime') or self.get_data('data.current_time')
+ seconds=self.get_data("data.currentTime") or self.get_data("data.current_time")
)
return {
- 'id': '_:MediaLocation',
- 'type': 'MediaLocation',
- 'currentTime': current_time
+ "id": "_:MediaLocation",
+ "type": "MediaLocation",
+ "currentTime": current_time,
}
-@CaliperTransformersRegistry.register('load_video')
-@CaliperTransformersRegistry.register('edx.video.loaded')
-@CaliperTransformersRegistry.register('stop_video')
-@CaliperTransformersRegistry.register('edx.video.stopped')
-@CaliperTransformersRegistry.register('complete_video')
-@CaliperTransformersRegistry.register('edx.video.completed')
-@CaliperTransformersRegistry.register('play_video')
-@CaliperTransformersRegistry.register('edx.video.played')
-@CaliperTransformersRegistry.register('pause_video')
-@CaliperTransformersRegistry.register('edx.video.paused')
-@CaliperTransformersRegistry.register('hide_transcript')
-@CaliperTransformersRegistry.register('edx.video.transcript.hidden')
-@CaliperTransformersRegistry.register('edx.video.closed_captions.hidden')
-@CaliperTransformersRegistry.register('video_hide_cc_menu')
-@CaliperTransformersRegistry.register('show_transcript')
-@CaliperTransformersRegistry.register('edx.video.transcript.shown')
-@CaliperTransformersRegistry.register('edx.video.closed_captions.shown')
-@CaliperTransformersRegistry.register('video_show_cc_menu')
+@CaliperTransformersRegistry.register("load_video")
+@CaliperTransformersRegistry.register("edx.video.loaded")
+@CaliperTransformersRegistry.register("stop_video")
+@CaliperTransformersRegistry.register("edx.video.stopped")
+@CaliperTransformersRegistry.register("complete_video")
+@CaliperTransformersRegistry.register("edx.video.completed")
+@CaliperTransformersRegistry.register("play_video")
+@CaliperTransformersRegistry.register("edx.video.played")
+@CaliperTransformersRegistry.register("pause_video")
+@CaliperTransformersRegistry.register("edx.video.paused")
+@CaliperTransformersRegistry.register("hide_transcript")
+@CaliperTransformersRegistry.register("edx.video.transcript.hidden")
+@CaliperTransformersRegistry.register("edx.video.closed_captions.hidden")
+@CaliperTransformersRegistry.register("video_hide_cc_menu")
+@CaliperTransformersRegistry.register("show_transcript")
+@CaliperTransformersRegistry.register("edx.video.transcript.shown")
+@CaliperTransformersRegistry.register("edx.video.closed_captions.shown")
+@CaliperTransformersRegistry.register("video_show_cc_menu")
class VideoTransformer(BaseVideoTransformer):
"""
Transform the events fired when a video is loaded.
"""
-@CaliperTransformersRegistry.register('seek_video')
-@CaliperTransformersRegistry.register('edx.video.position.changed')
+@CaliperTransformersRegistry.register("seek_video")
+@CaliperTransformersRegistry.register("edx.video.position.changed")
class SeekVideoTransformer(BaseVideoTransformer):
"""
Transform the events fired when a video is seeked.
@@ -157,31 +151,34 @@ def get_target(self):
"""
target = super().get_target()
current_time = convert_seconds_to_iso(
- seconds=self.get_data('data.currentTime') or self.get_data('data.old_time')
- )
- new_time = convert_seconds_to_iso(
- seconds=self.get_data('data.new_time')
+ seconds=self.get_data("data.currentTime") or self.get_data("data.old_time")
)
- target.update({
- 'currentTime': current_time,
- 'extensions': {
- 'newTime': new_time,
+ new_time = convert_seconds_to_iso(seconds=self.get_data("data.new_time"))
+ target.update(
+ {
+ "currentTime": current_time,
+ "extensions": {
+ "newTime": new_time,
+ },
}
- })
+ )
return target
-@CaliperTransformersRegistry.register('speed_change_video')
+@CaliperTransformersRegistry.register("speed_change_video")
class VideoSpeedChangedTransformer(BaseVideoTransformer):
"""
Transform the event fired when a video's speed is changed.
"""
- additional_fields = ('target',)
+
+ additional_fields = ("target",)
def get_extensions(self):
extensions = super().get_extensions()
- extensions.update({
- 'oldSpeed': self.get_data('old_speed'),
- 'newSpeed': self.get_data('new_speed'),
- })
+ extensions.update(
+ {
+ "oldSpeed": self.get_data("old_speed"),
+ "newSpeed": self.get_data("new_speed"),
+ }
+ )
return extensions
diff --git a/event_routing_backends/processors/caliper/registry.py b/event_routing_backends/processors/caliper/registry.py
index 7943e65f..a14ec375 100644
--- a/event_routing_backends/processors/caliper/registry.py
+++ b/event_routing_backends/processors/caliper/registry.py
@@ -1,6 +1,7 @@
"""
Registry to keep track of Caliper event transformers
"""
+
from event_routing_backends.processors.transformer_utils.registry import TransformerRegistry
@@ -8,4 +9,5 @@ class CaliperTransformersRegistry(TransformerRegistry):
"""
Registry to keep track of Caliper event transformers
"""
+
mapping = {}
diff --git a/event_routing_backends/processors/caliper/tests/test_caliper.py b/event_routing_backends/processors/caliper/tests/test_caliper.py
index f2c48a16..333732bb 100644
--- a/event_routing_backends/processors/caliper/tests/test_caliper.py
+++ b/event_routing_backends/processors/caliper/tests/test_caliper.py
@@ -1,6 +1,7 @@
"""
Test the caliper processor.
"""
+
import json
from django.test import SimpleTestCase
@@ -16,12 +17,10 @@ class TestCaliperProcessor(SimpleTestCase):
def setUp(self):
super().setUp()
- self.sample_event = {
- 'name': str(sentinel.name)
- }
+ self.sample_event = {"name": str(sentinel.name)}
self.routers = {
- '0': MagicMock(),
- '1': MagicMock(),
+ "0": MagicMock(),
+ "1": MagicMock(),
}
self.processor = CaliperProcessor()
@@ -30,42 +29,35 @@ def setUp(self):
def test_skip_event_when_disabled(self):
self.assertFalse(self.processor(self.sample_event))
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_send_method_with_no_transformer_implemented(self, mocked_logger):
self.assertFalse(self.processor([self.sample_event]))
mocked_logger.error.assert_called_once_with(
- 'Could not get transformer for %s event.',
- self.sample_event.get('name')
+ "Could not get transformer for %s event.", self.sample_event.get("name")
)
@patch(
- 'event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer',
- side_effect=ValueError('Error Message')
+ "event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer",
+ side_effect=ValueError("Error Message"),
)
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_send_method_with_unknown_exception(self, mocked_logger, _):
with self.assertRaises(ValueError):
self.processor([self.sample_event])
mocked_logger.exception.assert_called_once_with(
'There was an error while trying to transform event "sentinel.name" using CaliperProcessor'
- ' processor. Error: Error Message')
+ " processor. Error: Error Message"
+ )
@patch(
- 'event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer'
+ "event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer"
)
- @patch('event_routing_backends.processors.caliper.transformer_processor.logger')
- @patch('event_routing_backends.processors.caliper.transformer_processor.caliper_logger')
- def test_send_method_with_successfull_flow(
- self,
- mocked_caliper_logger,
- mocked_logger,
- mocked_get_transformer
- ):
- transformed_event = {
- 'transformed_key': 'transformed_value'
- }
+ @patch("event_routing_backends.processors.caliper.transformer_processor.logger")
+ @patch("event_routing_backends.processors.caliper.transformer_processor.caliper_logger")
+ def test_send_method_with_successfull_flow(self, mocked_caliper_logger, mocked_logger, mocked_get_transformer):
+ transformed_event = {"transformed_key": "transformed_value"}
mocked_transformer = MagicMock()
mocked_transformer.transform.return_value = transformed_event
mocked_get_transformer.return_value = mocked_transformer
@@ -75,33 +67,24 @@ def test_send_method_with_successfull_flow(
self.assertIn(
call(
'Caliper version of edx event "{}" is: {}'.format(
- self.sample_event.get('name'),
- json.dumps(transformed_event)
+ self.sample_event.get("name"), json.dumps(transformed_event)
)
),
- mocked_logger.debug.mock_calls
+ mocked_logger.debug.mock_calls,
)
- self.assertIn(
- call(json.dumps(transformed_event)),
- mocked_caliper_logger.info.mock_calls
- )
+ self.assertIn(call(json.dumps(transformed_event)), mocked_caliper_logger.info.mock_calls)
@override_settings(CALIPER_EVENT_LOGGING_ENABLED=False)
@patch(
- 'event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer'
+ "event_routing_backends.processors.caliper.transformer_processor.CaliperTransformersRegistry.get_transformer"
)
- @patch('event_routing_backends.processors.caliper.transformer_processor.logger')
- @patch('event_routing_backends.processors.caliper.transformer_processor.caliper_logger')
+ @patch("event_routing_backends.processors.caliper.transformer_processor.logger")
+ @patch("event_routing_backends.processors.caliper.transformer_processor.caliper_logger")
def test_send_method_with_successfull_flow_logging_disabled(
- self,
- mocked_caliper_logger,
- mocked_logger,
- mocked_get_transformer
+ self, mocked_caliper_logger, mocked_logger, mocked_get_transformer
):
- transformed_event = {
- 'transformed_key': 'transformed_value'
- }
+ transformed_event = {"transformed_key": "transformed_value"}
mocked_transformer = MagicMock()
mocked_transformer.transform.return_value = transformed_event
mocked_get_transformer.return_value = mocked_transformer
@@ -111,19 +94,15 @@ def test_send_method_with_successfull_flow_logging_disabled(
self.assertIn(
call(
'Caliper version of edx event "{}" is: {}'.format(
- self.sample_event.get('name'),
- json.dumps(transformed_event)
+ self.sample_event.get("name"), json.dumps(transformed_event)
)
),
- mocked_logger.debug.mock_calls
+ mocked_logger.debug.mock_calls,
)
- self.assertNotIn(
- call(json.dumps(transformed_event)),
- mocked_caliper_logger.info.mock_calls
- )
+ self.assertNotIn(call(json.dumps(transformed_event)), mocked_caliper_logger.info.mock_calls)
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_with_no_registry(self, mocked_logger):
backend = CaliperProcessor()
backend.registry = None
diff --git a/event_routing_backends/processors/caliper/tests/test_envelope_processor.py b/event_routing_backends/processors/caliper/tests/test_envelope_processor.py
index 9876b80f..6e116083 100644
--- a/event_routing_backends/processors/caliper/tests/test_envelope_processor.py
+++ b/event_routing_backends/processors/caliper/tests/test_envelope_processor.py
@@ -1,6 +1,7 @@
"""
Test the CaliperEnvelopeProcessor.
"""
+
from datetime import datetime
from unittest import TestCase
from unittest.mock import patch, sentinel
@@ -21,19 +22,22 @@ class TestCaliperEnvelopeProcessor(TestCase):
def setUp(self):
super().setUp()
- self.sample_event = {
- 'name': str(sentinel.name)
- }
- self.sensor_id = 'http://test.sensor.com'
+ self.sample_event = {"name": str(sentinel.name)}
+ self.sensor_id = "http://test.sensor.com"
- @patch('event_routing_backends.processors.caliper.envelope_processor.datetime')
+ @patch("event_routing_backends.processors.caliper.envelope_processor.datetime")
def test_caliper_envelope_processor(self, mocked_datetime):
mocked_datetime.now.return_value = FROZEN_TIME
result = CaliperEnvelopeProcessor(sensor_id=self.sensor_id)([self.sample_event])
- self.assertEqual(result, [{
- 'sensor': self.sensor_id,
- 'sendTime': convert_datetime_to_iso(str(FROZEN_TIME)),
- 'data': [self.sample_event],
- 'dataVersion': CALIPER_EVENT_CONTEXT
- }])
+ self.assertEqual(
+ result,
+ [
+ {
+ "sensor": self.sensor_id,
+ "sendTime": convert_datetime_to_iso(str(FROZEN_TIME)),
+ "data": [self.sample_event],
+ "dataVersion": CALIPER_EVENT_CONTEXT,
+ }
+ ],
+ )
diff --git a/event_routing_backends/processors/caliper/tests/test_transformers.py b/event_routing_backends/processors/caliper/tests/test_transformers.py
index 3d3e3fe3..e016a31c 100644
--- a/event_routing_backends/processors/caliper/tests/test_transformers.py
+++ b/event_routing_backends/processors/caliper/tests/test_transformers.py
@@ -1,6 +1,7 @@
"""
Test the transformers for all of the currently supported events into Caliper format.
"""
+
import os
from django.test import TestCase
@@ -18,6 +19,7 @@ class CaliperTransformersFixturesTestMixin(TransformersFixturesTestMixin):
This mixin is split into its own class so it can be used by packages outside of ERB.
"""
+
registry = CaliperTransformersRegistry
@property
@@ -25,10 +27,10 @@ def expected_events_fixture_path(self):
"""
Return the path to the expected transformed events fixture files.
"""
- return '{}/fixtures/expected'.format(os.path.dirname(os.path.abspath(__file__)))
+ return "{}/fixtures/expected".format(os.path.dirname(os.path.abspath(__file__)))
def assert_correct_transformer_version(self, transformed_event, transformer_version):
- self.assertEqual(transformed_event['extensions']['transformerVersion'], transformer_version)
+ self.assertEqual(transformed_event["extensions"]["transformerVersion"], transformer_version)
def compare_events(self, transformed_event, expected_event):
"""
@@ -42,9 +44,9 @@ def compare_events(self, transformed_event, expected_event):
AssertionError: Raised if the two events are not same.
"""
# id is a randomly generated UUID therefore not comparing that
- self.assertIn('id', transformed_event)
- expected_event.pop('id')
- transformed_event.pop('id')
+ self.assertIn("id", transformed_event)
+ expected_event.pop("id")
+ transformed_event.pop("id")
self.assertDictEqual(expected_event, transformed_event)
diff --git a/event_routing_backends/processors/caliper/transformer.py b/event_routing_backends/processors/caliper/transformer.py
index 0f7e9496..4d5090d9 100644
--- a/event_routing_backends/processors/caliper/transformer.py
+++ b/event_routing_backends/processors/caliper/transformer.py
@@ -1,6 +1,7 @@
"""
Base transformer to transform common event fields.
"""
+
import uuid
from django.contrib.auth import get_user_model
@@ -16,11 +17,12 @@ class CaliperTransformer(BaseTransformerMixin):
"""
Base transformer class to transform common fields.
"""
+
required_fields = (
- 'type',
- 'object',
- 'action',
- 'extensions',
+ "type",
+ "object",
+ "action",
+ "extensions",
)
def base_transform(self, transformed_event):
@@ -37,22 +39,24 @@ def _add_generic_fields(self, transformed_event):
"""
Add all of the generic fields to the transformed_event object.
"""
- transformed_event.update({
- '@context': CALIPER_EVENT_CONTEXT,
- 'id': uuid.uuid4().urn,
- 'eventTime': convert_datetime_to_iso(self.get_data('timestamp', True)),
- })
+ transformed_event.update(
+ {
+ "@context": CALIPER_EVENT_CONTEXT,
+ "id": uuid.uuid4().urn,
+ "eventTime": convert_datetime_to_iso(self.get_data("timestamp", True)),
+ }
+ )
def _add_actor_info(self, transformed_event):
"""
Add all generic information related to `actor` to the transformed_event.
"""
- transformed_event['actor'] = {
- 'id': self.get_object_iri(
- 'user',
- get_anonymous_user_id(self.extract_username_or_userid(), 'CALIPER'),
+ transformed_event["actor"] = {
+ "id": self.get_object_iri(
+ "user",
+ get_anonymous_user_id(self.extract_username_or_userid(), "CALIPER"),
),
- 'type': 'Person'
+ "type": "Person",
}
def _add_session_info(self, transformed_event):
@@ -61,12 +65,12 @@ def _add_session_info(self, transformed_event):
"""
sessionid = self.extract_sessionid()
if sessionid:
- transformed_event['session'] = {
- 'id': self.get_object_iri(
- 'sessions',
+ transformed_event["session"] = {
+ "id": self.get_object_iri(
+ "sessions",
sessionid,
),
- 'type': 'Session'
+ "type": "Session",
}
def get_object(self):
@@ -77,13 +81,13 @@ def get_object(self):
dict
"""
caliper_object = super().get_object()
- course_id = self.get_data('context.course_id')
+ course_id = self.get_data("context.course_id")
if course_id is not None:
extensions = {"isPartOf": {}}
- extensions['isPartOf']['id'] = self.get_object_iri('course', course_id)
- extensions['isPartOf']['type'] = 'CourseOffering'
- caliper_object['extensions'] = {}
- caliper_object['extensions'].update(extensions)
+ extensions["isPartOf"]["id"] = self.get_object_iri("course", course_id)
+ extensions["isPartOf"]["type"] = "CourseOffering"
+ caliper_object["extensions"] = {}
+ caliper_object["extensions"].update(extensions)
return caliper_object
@@ -95,5 +99,5 @@ def get_extensions(self):
dict
"""
return {
- 'transformerVersion': self.transformer_version,
+ "transformerVersion": self.transformer_version,
}
diff --git a/event_routing_backends/processors/caliper/transformer_processor.py b/event_routing_backends/processors/caliper/transformer_processor.py
index 45b8b7ec..11f1feee 100644
--- a/event_routing_backends/processors/caliper/transformer_processor.py
+++ b/event_routing_backends/processors/caliper/transformer_processor.py
@@ -1,6 +1,7 @@
"""
Caliper processor for transforming and routing events.
"""
+
import json
from logging import getLogger
@@ -11,7 +12,7 @@
from event_routing_backends.processors.mixins.base_transformer_processor import BaseTransformerProcessorMixin
logger = getLogger(__name__)
-caliper_logger = getLogger('caliper_tracking')
+caliper_logger = getLogger("caliper_tracking")
class CaliperProcessor(BaseTransformerProcessorMixin):
@@ -50,9 +51,6 @@ def transform_event(self, event):
if CALIPER_EVENT_LOGGING_ENABLED.is_enabled():
caliper_logger.info(json_event)
- logger.debug('Caliper version of edx event "{}" is: {}'.format(
- event["name"],
- json_event
- ))
+ logger.debug('Caliper version of edx event "{}" is: {}'.format(event["name"], json_event))
return transformed_event
diff --git a/event_routing_backends/processors/mixins/base_transformer.py b/event_routing_backends/processors/mixins/base_transformer.py
index d754b34f..323351b8 100644
--- a/event_routing_backends/processors/mixins/base_transformer.py
+++ b/event_routing_backends/processors/mixins/base_transformer.py
@@ -1,6 +1,7 @@
"""
Base Transformer Mixin to add or transform common data values.
"""
+
import logging
from django.conf import settings
@@ -43,6 +44,7 @@ def find_nested(source_dict, key):
Returns:
ANY
"""
+
def _find_nested(event_dict):
"""
Inner recursive method to find the key in dict.
@@ -84,7 +86,7 @@ def transformer_version(self):
version of transformer package used to transform events
"""
- if getattr(settings, 'RUNNING_WITH_TEST_SETTINGS', False):
+ if getattr(settings, "RUNNING_WITH_TEST_SETTINGS", False):
return "{}@{}".format("event-routing-backends", "1.1.1")
else:
return "{}@{}".format("event-routing-backends", __version__)
@@ -103,13 +105,13 @@ def transform(self):
if hasattr(self, key):
value = getattr(self, key)
transformed_event[key] = value
- elif hasattr(self, f'get_{key}'):
- value = getattr(self, f'get_{key}')()
+ elif hasattr(self, f"get_{key}"):
+ value = getattr(self, f"get_{key}")()
transformed_event[key] = value
else:
raise ValueError(
'Cannot find value for "{}" in transformer {} for the edx event "{}"'.format(
- key, self.__class__.__name__, self.get_data('name', True)
+ key, self.__class__.__name__, self.get_data("name", True)
)
)
@@ -125,11 +127,11 @@ def extract_username_or_userid(self):
Returns:
str
"""
- username_or_id = self.get_data('username') or self.get_data('user_id')
+ username_or_id = self.get_data("username") or self.get_data("user_id")
if not username_or_id:
- username_or_id = self.get_data('data.username') or self.get_data('data.user_id')
+ username_or_id = self.get_data("data.username") or self.get_data("data.user_id")
if not username_or_id:
- username_or_id = self.get_data('context.username') or self.get_data('context.user_id')
+ username_or_id = self.get_data("context.username") or self.get_data("context.user_id")
return username_or_id
def extract_sessionid(self):
@@ -139,7 +141,7 @@ def extract_sessionid(self):
Returns:
str
"""
- return self.get_data('session') or self.get_data('context.session') or self.get_data('data.session')
+ return self.get_data("session") or self.get_data("context.session") or self.get_data("data.session")
def get_data(self, key, required=False):
"""
@@ -166,7 +168,7 @@ def get_data(self, key, required=False):
}
}
"""
- if '.' in key:
+ if "." in key:
result = get_value_from_dotted_path(self.event, key)
else:
result = BaseTransformerMixin.find_nested(self.event, key)
@@ -176,9 +178,7 @@ def get_data(self, key, required=False):
if result is None:
if required:
- raise ValueError(
- 'Could not get value for {} in event "{}"'.format(key, self.event.get('name', None))
- )
+ raise ValueError('Could not get value for {} in event "{}"'.format(key, self.event.get("name", None)))
return result
@@ -210,10 +210,8 @@ def get_object_iri(self, object_type, object_id):
if object_id is None or object_type is None:
return None
- return '{root_url}/{object_type}/{object_id}'.format(
- root_url=settings.LMS_ROOT_URL,
- object_type=object_type,
- object_id=object_id
+ return "{root_url}/{object_type}/{object_id}".format(
+ root_url=settings.LMS_ROOT_URL, object_type=object_type, object_id=object_id
)
def get_object(self):
diff --git a/event_routing_backends/processors/mixins/base_transformer_processor.py b/event_routing_backends/processors/mixins/base_transformer_processor.py
index 17241793..b0756b04 100644
--- a/event_routing_backends/processors/mixins/base_transformer_processor.py
+++ b/event_routing_backends/processors/mixins/base_transformer_processor.py
@@ -1,6 +1,7 @@
"""
Base Processor Mixin for transformer processors.
"""
+
from logging import getLogger
from eventtracking.processors.exceptions import NoBackendEnabled, NoTransformerImplemented
@@ -61,21 +62,19 @@ def transform_event(self, event):
Returns:
ANY: transformed event
"""
- event_name = event.get('name')
+ event_name = event.get("name")
try:
transformed_event = self.get_transformed_event(event)
except NoTransformerImplemented:
- logger.error('Could not get transformer for %s event.', event_name)
+ logger.error("Could not get transformer for %s event.", event_name)
return None
except Exception as ex:
logger.exception(
'There was an error while trying to transform event "{event}" using'
- ' {processor} processor. Error: {error}'.format(
- event=event_name,
- processor=self.__class__.__name__,
- error=ex
+ " {processor} processor. Error: {error}".format(
+ event=event_name, processor=self.__class__.__name__, error=ex
)
)
raise
@@ -99,10 +98,11 @@ def get_transformed_event(self, event):
NoTransformerImplemented
"""
if not self.registry:
- logger.exception('Cannot transform event "{event}". Transformer class '
- '"{transformer}" must have its own "registry" set.'.format(
- event=event['name'],
- transformer=self.__class__.__name__
- ))
+ logger.exception(
+ 'Cannot transform event "{event}". Transformer class '
+ '"{transformer}" must have its own "registry" set.'.format(
+ event=event["name"], transformer=self.__class__.__name__
+ )
+ )
return None
return self.registry.get_transformer(event).transform()
diff --git a/event_routing_backends/processors/openedx_filters/decorators.py b/event_routing_backends/processors/openedx_filters/decorators.py
index adb1b4fe..a2cd7f76 100644
--- a/event_routing_backends/processors/openedx_filters/decorators.py
+++ b/event_routing_backends/processors/openedx_filters/decorators.py
@@ -1,6 +1,7 @@
"""
Decorators that helps to implement the Processor filter functionality.
"""
+
import functools
from event_routing_backends.processors.openedx_filters.filters import ProcessorBaseFilter
@@ -35,6 +36,7 @@ def get_object(self):
3. More details about filters https://github.com/openedx/openedx-filters/
"""
+
def wrapper(func):
@functools.wraps(func)
def inner_wrapper(*args, **kwargs):
diff --git a/event_routing_backends/processors/openedx_filters/exceptions.py b/event_routing_backends/processors/openedx_filters/exceptions.py
index 6a6d1a6b..611fa50f 100644
--- a/event_routing_backends/processors/openedx_filters/exceptions.py
+++ b/event_routing_backends/processors/openedx_filters/exceptions.py
@@ -1,6 +1,7 @@
"""
Custom processors exceptions thrown by filters.
"""
+
from openedx_filters.exceptions import OpenEdxFilterException
diff --git a/event_routing_backends/processors/openedx_filters/filters.py b/event_routing_backends/processors/openedx_filters/filters.py
index 7f02aa18..36a615bd 100644
--- a/event_routing_backends/processors/openedx_filters/filters.py
+++ b/event_routing_backends/processors/openedx_filters/filters.py
@@ -2,6 +2,7 @@
Processors filters, this file aims to contain all the filters that could modify the
standard transformer results by implementing external pipeline steps.
"""
+
from openedx_filters.tooling import OpenEdxPublicFilter
from event_routing_backends.processors.openedx_filters.exceptions import InvalidFilterType
diff --git a/event_routing_backends/processors/tests/openedx_filters/test_filters.py b/event_routing_backends/processors/tests/openedx_filters/test_filters.py
index 6f186ad8..e9717a5e 100644
--- a/event_routing_backends/processors/tests/openedx_filters/test_filters.py
+++ b/event_routing_backends/processors/tests/openedx_filters/test_filters.py
@@ -1,4 +1,5 @@
"""Test cases for the filters file."""
+
from django.test import TestCase
from mock import Mock, patch
from openedx_filters.tooling import OpenEdxPublicFilter
@@ -29,9 +30,7 @@ def test_expected_value(self, run_pipeline_mock):
- run_filter returns the value of the result key
"""
transformer = Mock()
- run_pipeline_mock.return_value = {
- "result": "expected_value"
- }
+ run_pipeline_mock.return_value = {"result": "expected_value"}
input_value = "dummy_value"
openedx_filter = ProcessorBaseFilter.generate_dynamic_filter(filter_type="test_filter")
diff --git a/event_routing_backends/processors/tests/transformers_test_mixin.py b/event_routing_backends/processors/tests/transformers_test_mixin.py
index 07f79887..b1f65b88 100644
--- a/event_routing_backends/processors/tests/transformers_test_mixin.py
+++ b/event_routing_backends/processors/tests/transformers_test_mixin.py
@@ -1,6 +1,7 @@
"""
Mixin for testing transformers for all of the currently supported events
"""
+
import json
import logging
import os
@@ -24,9 +25,9 @@
try:
EVENT_FIXTURE_FILENAMES = [
- event_file_name for event_file_name in os.listdir(
- f'{TEST_DIR_PATH}/fixtures/current/'
- ) if event_file_name.endswith(".json")
+ event_file_name
+ for event_file_name in os.listdir(f"{TEST_DIR_PATH}/fixtures/current/")
+ if event_file_name.endswith(".json")
]
except FileNotFoundError as exc: # pragma: no cover
@@ -36,13 +37,14 @@
class DummyTransformer(BaseTransformerMixin):
- required_fields = ('does_not_exist',)
+ required_fields = ("does_not_exist",)
class TransformersFixturesTestMixin:
"""
Mixin to help test event transforms using "raw" and "expected" fixture data.
"""
+
# no limit to diff in the output of tests
maxDiff = None
@@ -50,7 +52,7 @@ class TransformersFixturesTestMixin:
def setUp(self):
super().setUp()
- UserFactory.create(username='edx', email='edx@example.com')
+ UserFactory.create(username="edx", email="edx@example.com")
@property
def raw_events_fixture_path(self):
@@ -72,10 +74,10 @@ def get_raw_event(self, event_filename):
"""
base_event_filename = os.path.basename(event_filename)
- input_event_file_path = '{test_dir}/{event_filename}'.format(
+ input_event_file_path = "{test_dir}/{event_filename}".format(
test_dir=self.raw_events_fixture_path, event_filename=base_event_filename
)
- with open(input_event_file_path, encoding='utf-8') as current:
+ with open(input_event_file_path, encoding="utf-8") as current:
data = json.loads(current.read())
return data
@@ -96,7 +98,7 @@ def check_event_transformer(self, raw_event_file, expected_event_file):
Writes errors to test_out/ for analysis.
"""
original_event = self.get_raw_event(raw_event_file)
- with open(expected_event_file, encoding='utf-8') as expected:
+ with open(expected_event_file, encoding="utf-8") as expected:
expected_event = json.loads(expected.read())
event_filename = os.path.basename(raw_event_file)
@@ -132,47 +134,45 @@ class TransformersTestMixin:
"""
Tests that supported events are transformed correctly.
"""
+
def test_with_no_field_transformer(self):
- self.registry.register('test_event')(DummyTransformer)
+ self.registry.register("test_event")(DummyTransformer)
with self.assertRaises(ValueError):
- self.registry.get_transformer({
- 'name': 'test_event'
- }).transform()
+ self.registry.get_transformer({"name": "test_event"}).transform()
def test_required_field_transformer(self):
- self.registry.register('test_event')(DummyTransformer)
+ self.registry.register("test_event")(DummyTransformer)
with self.assertRaises(ValueError):
- self.registry.get_transformer({
- "name": "edx.course.enrollment.activated"
- }).transform()
+ self.registry.get_transformer({"name": "edx.course.enrollment.activated"}).transform()
@override_settings(RUNNING_WITH_TEST_SETTINGS=True)
def test_transformer_version_with_test_settings(self):
- self.registry.register('test_event')(DummyTransformer)
- raw_event = self.get_raw_event('edx.course.enrollment.activated.json')
+ self.registry.register("test_event")(DummyTransformer)
+ raw_event = self.get_raw_event("edx.course.enrollment.activated.json")
transformed_event = self.registry.get_transformer(raw_event).transform()
- self.assert_correct_transformer_version(transformed_event, 'event-routing-backends@1.1.1')
+ self.assert_correct_transformer_version(transformed_event, "event-routing-backends@1.1.1")
@override_settings(RUNNING_WITH_TEST_SETTINGS=False)
def test_transformer_version(self):
- self.registry.register('test_event')(DummyTransformer)
- raw_event = self.get_raw_event('edx.course.enrollment.activated.json')
+ self.registry.register("test_event")(DummyTransformer)
+ raw_event = self.get_raw_event("edx.course.enrollment.activated.json")
transformed_event = self.registry.get_transformer(raw_event).transform()
- self.assert_correct_transformer_version(transformed_event, 'event-routing-backends@{}'.format(__version__))
+ self.assert_correct_transformer_version(transformed_event, "event-routing-backends@{}".format(__version__))
- @patch('event_routing_backends.helpers.uuid.uuid4')
+ @patch("event_routing_backends.helpers.uuid.uuid4")
@ddt.data(*EVENT_FIXTURE_FILENAMES)
def test_event_transformer(self, raw_event_file_path, mocked_uuid4):
# Used to generate the anonymized actor.name,
# which in turn is used to generate the event UUID.
- mocked_uuid4.return_value = UUID('32e08e30-f8ae-4ce2-94a8-c2bfe38a70cb')
+ mocked_uuid4.return_value = UUID("32e08e30-f8ae-4ce2-94a8-c2bfe38a70cb")
# if an event's expected fixture doesn't exist, the test shouldn't fail.
# evaluate transformation of only supported event fixtures.
base_event_filename = os.path.basename(raw_event_file_path)
- expected_event_file_path = '{expected_events_fixture_path}/{event_filename}'.format(
- expected_events_fixture_path=self.expected_events_fixture_path, event_filename=base_event_filename
+ expected_event_file_path = "{expected_events_fixture_path}/{event_filename}".format(
+ expected_events_fixture_path=self.expected_events_fixture_path,
+ event_filename=base_event_filename,
)
if not os.path.isfile(expected_event_file_path):
diff --git a/event_routing_backends/processors/transformer_utils/registry.py b/event_routing_backends/processors/transformer_utils/registry.py
index 63621bc6..3aed6a33 100644
--- a/event_routing_backends/processors/transformer_utils/registry.py
+++ b/event_routing_backends/processors/transformer_utils/registry.py
@@ -1,6 +1,7 @@
"""
Registry to keep track of event transformers
"""
+
from logging import getLogger
from eventtracking.processors.exceptions import NoTransformerImplemented
@@ -15,6 +16,7 @@ class TransformerRegistry:
Every Registry that inherits this registry MUST has its own `mapping`
class attribute to avoid conflicts.
"""
+
mapping = {}
@classmethod
@@ -25,12 +27,8 @@ def validate_mapping_exists(cls):
Raises:
AttributeError
"""
- if 'mapping' not in cls.__dict__:
- raise AttributeError(
- '{} registry must has its own "mapping" class attribute.'.format(
- cls.__name__
- )
- )
+ if "mapping" not in cls.__dict__:
+ raise AttributeError('{} registry must has its own "mapping" class attribute.'.format(cls.__name__))
@classmethod
def register(cls, event_key):
@@ -51,20 +49,19 @@ def __inner__(transformer):
"""
if event_key in cls.mapping:
logger.info(
- 'Overriding the existing transformer {old_transformer} for event '
- '{event_name} with {new_transformer}'.format(
+ "Overriding the existing transformer {old_transformer} for event "
+ "{event_name} with {new_transformer}".format(
old_transformer=cls.mapping[event_key],
new_transformer=transformer,
- event_name=event_key
+ event_name=event_key,
)
)
cls.mapping[event_key] = transformer
else:
logger.debug(
- 'Registered transformer {transformer} for event {event_name} '.format(
- transformer=transformer,
- event_name=event_key
+ "Registered transformer {transformer} for event {event_name} ".format(
+ transformer=transformer, event_name=event_key
)
)
cls.mapping[event_key] = transformer
@@ -86,7 +83,7 @@ def get_transformer(cls, event):
Raises:
`NoTransformerImplemented`: if matching transformer is not found.
"""
- event_name = event.get('name')
+ event_name = event.get("name")
try:
return cls.mapping[event_name](event)
except KeyError as error:
diff --git a/event_routing_backends/processors/transformer_utils/tests/test_registry.py b/event_routing_backends/processors/transformer_utils/tests/test_registry.py
index 6c56b9a2..b1301bc9 100644
--- a/event_routing_backends/processors/transformer_utils/tests/test_registry.py
+++ b/event_routing_backends/processors/transformer_utils/tests/test_registry.py
@@ -1,6 +1,7 @@
"""
Test the TransformerRegistry
"""
+
from unittest.mock import MagicMock
import ddt
@@ -20,18 +21,20 @@ class WithoutRegistry(TransformerRegistry):
pass
with self.assertRaises(AttributeError):
- WithoutRegistry.register('test.key')(MagicMock())
+ WithoutRegistry.register("test.key")(MagicMock())
def test_override_register(self):
mocked_transformer = MagicMock()
mocked_transformer2 = MagicMock()
- TransformerRegistry.register('test.key')(mocked_transformer)
- self.assertEqual(TransformerRegistry.get_transformer({
- 'name': 'test.key'
- }), mocked_transformer())
-
- TransformerRegistry.register('test.key')(mocked_transformer2)
- self.assertEqual(TransformerRegistry.get_transformer({
- 'name': 'test.key'
- }), mocked_transformer2())
+ TransformerRegistry.register("test.key")(mocked_transformer)
+ self.assertEqual(
+ TransformerRegistry.get_transformer({"name": "test.key"}),
+ mocked_transformer(),
+ )
+
+ TransformerRegistry.register("test.key")(mocked_transformer2)
+ self.assertEqual(
+ TransformerRegistry.get_transformer({"name": "test.key"}),
+ mocked_transformer2(),
+ )
diff --git a/event_routing_backends/processors/xapi/__init__.py b/event_routing_backends/processors/xapi/__init__.py
index c912628f..9ac55209 100644
--- a/event_routing_backends/processors/xapi/__init__.py
+++ b/event_routing_backends/processors/xapi/__init__.py
@@ -2,7 +2,6 @@
xAPI processors and spec implementation.
"""
-
from edx_toggles.toggles import SettingToggle
# .. toggle_name: XAPI_EVENTS_ENABLED
diff --git a/event_routing_backends/processors/xapi/constants.py b/event_routing_backends/processors/xapi/constants.py
index 0d2a7cec..7ac4653f 100644
--- a/event_routing_backends/processors/xapi/constants.py
+++ b/event_routing_backends/processors/xapi/constants.py
@@ -3,125 +3,125 @@
"""
# xAPI verbs
-XAPI_VERB_ATTEMPTED = 'http://adlnet.gov/expapi/verbs/attempted'
-XAPI_VERB_EVALUATED = 'https://w3id.org/xapi/acrossx/verbs/evaluated'
-XAPI_VERB_ANSWERED = 'http://adlnet.gov/expapi/verbs/answered'
-XAPI_VERB_LAUNCHED = 'http://adlnet.gov/expapi/verbs/launched'
-XAPI_VERB_REGISTERED = 'http://adlnet.gov/expapi/verbs/registered'
-XAPI_VERB_UNREGISTERED = 'http://id.tincanapi.com/verb/unregistered'
-XAPI_VERB_COMPLETED = 'http://adlnet.gov/expapi/verbs/completed'
-XAPI_VERB_PASSED = 'http://adlnet.gov/expapi/verbs/passed'
-XAPI_VERB_FAILED = 'http://adlnet.gov/expapi/verbs/failed'
-XAPI_VERB_EXPERIENCED = 'http://adlnet.gov/expapi/verbs/experienced'
-XAPI_VERB_NAVIGATED = 'https://w3id.org/xapi/dod-isd/verbs/navigated'
-XAPI_VERB_POSTED = 'https://w3id.org/xapi/acrossx/verbs/posted'
-XAPI_VERB_EDITED = 'https://w3id.org/xapi/acrossx/verbs/edited'
-XAPI_VERB_VIEWED = 'http://id.tincanapi.com/verb/viewed'
-XAPI_VERB_DELETED = 'https://w3id.org/xapi/dod-isd/verbs/deleted'
-XAPI_VERB_VOTED = 'https://w3id.org/xapi/openedx/verb/voted'
-XAPI_VERB_REPORTED = 'https://w3id.org/xapi/acrossx/verbs/reported'
-XAPI_VERB_UNREPORTED = 'https://w3id.org/xapi/openedx/verb/unreported'
-XAPI_VERB_EARNED = 'http://id.tincanapi.com/verb/earned'
+XAPI_VERB_ATTEMPTED = "http://adlnet.gov/expapi/verbs/attempted"
+XAPI_VERB_EVALUATED = "https://w3id.org/xapi/acrossx/verbs/evaluated"
+XAPI_VERB_ANSWERED = "http://adlnet.gov/expapi/verbs/answered"
+XAPI_VERB_LAUNCHED = "http://adlnet.gov/expapi/verbs/launched"
+XAPI_VERB_REGISTERED = "http://adlnet.gov/expapi/verbs/registered"
+XAPI_VERB_UNREGISTERED = "http://id.tincanapi.com/verb/unregistered"
+XAPI_VERB_COMPLETED = "http://adlnet.gov/expapi/verbs/completed"
+XAPI_VERB_PASSED = "http://adlnet.gov/expapi/verbs/passed"
+XAPI_VERB_FAILED = "http://adlnet.gov/expapi/verbs/failed"
+XAPI_VERB_EXPERIENCED = "http://adlnet.gov/expapi/verbs/experienced"
+XAPI_VERB_NAVIGATED = "https://w3id.org/xapi/dod-isd/verbs/navigated"
+XAPI_VERB_POSTED = "https://w3id.org/xapi/acrossx/verbs/posted"
+XAPI_VERB_EDITED = "https://w3id.org/xapi/acrossx/verbs/edited"
+XAPI_VERB_VIEWED = "http://id.tincanapi.com/verb/viewed"
+XAPI_VERB_DELETED = "https://w3id.org/xapi/dod-isd/verbs/deleted"
+XAPI_VERB_VOTED = "https://w3id.org/xapi/openedx/verb/voted"
+XAPI_VERB_REPORTED = "https://w3id.org/xapi/acrossx/verbs/reported"
+XAPI_VERB_UNREPORTED = "https://w3id.org/xapi/openedx/verb/unreported"
+XAPI_VERB_EARNED = "http://id.tincanapi.com/verb/earned"
-XAPI_VERB_PROGRESSED = 'http://adlnet.gov/expapi/verbs/progressed'
-XAPI_VERB_TERMINATED = 'http://adlnet.gov/expapi/verbs/terminated'
-XAPI_VERB_ASKED = 'http://adlnet.gov/expapi/verbs/asked'
+XAPI_VERB_PROGRESSED = "http://adlnet.gov/expapi/verbs/progressed"
+XAPI_VERB_TERMINATED = "http://adlnet.gov/expapi/verbs/terminated"
+XAPI_VERB_ASKED = "http://adlnet.gov/expapi/verbs/asked"
-XAPI_VERB_INITIALIZED = 'http://adlnet.gov/expapi/verbs/initialized'
-XAPI_VERB_PLAYED = 'https://w3id.org/xapi/video/verbs/played'
-XAPI_VERB_PAUSED = 'https://w3id.org/xapi/video/verbs/paused'
-XAPI_VERB_SEEKED = 'https://w3id.org/xapi/video/verbs/seeked'
-XAPI_VERB_INTERACTED = 'http://adlnet.gov/expapi/verbs/interacted'
+XAPI_VERB_INITIALIZED = "http://adlnet.gov/expapi/verbs/initialized"
+XAPI_VERB_PLAYED = "https://w3id.org/xapi/video/verbs/played"
+XAPI_VERB_PAUSED = "https://w3id.org/xapi/video/verbs/paused"
+XAPI_VERB_SEEKED = "https://w3id.org/xapi/video/verbs/seeked"
+XAPI_VERB_INTERACTED = "http://adlnet.gov/expapi/verbs/interacted"
# xAPI activities
-XAPI_ACTIVITY_QUESTION = 'http://adlnet.gov/expapi/activities/question'
-XAPI_ACTIVITY_SOLUTION = 'http://id.tincanapi.com/activitytype/solution'
-XAPI_ACTIVITY_RESOURCE = 'http://id.tincanapi.com/activitytype/resource'
-XAPI_ACTIVITY_INTERACTION = 'http://adlnet.gov/expapi/activities/cmi.interaction'
-XAPI_ACTIVITY_SUPPLEMENTAL_INFO = 'https://w3id.org/xapi/acrossx/extensions/supplemental-info'
-XAPI_ACTIVITY_COURSE = 'http://adlnet.gov/expapi/activities/course'
-XAPI_ACTIVITY_MODULE = 'http://adlnet.gov/expapi/activities/module'
-XAPI_ACTIVITY_VIDEO = 'https://w3id.org/xapi/video/activity-type/video'
-XAPI_ACTIVITY_DISCUSSION = 'http://id.tincanapi.com/activitytype/discussion'
-XAPI_ACTIVITY_LINK = 'http://adlnet.gov/expapi/activities/link'
-XAPI_ACTIVITY_POSITION = 'http://id.tincanapi.com/extension/position'
-XAPI_ACTIVITY_TOTAL_COUNT = 'https://w3id.org/xapi/acrossx/extensions/total-items'
-XAPI_ACTIVITY_MODE = 'https://w3id.org/xapi/acrossx/extensions/type'
-XAPI_ACTIVITY_ATTEMPT = 'http://id.tincanapi.com/extension/attempt-id'
-XAPI_ACTIVITY_GRADE_CLASSIFICATION = 'http://www.tincanapi.co.uk/activitytypes/grade_classification'
-XAPI_ACTIVITY_GRADE = 'http://www.tincanapi.co.uk/extensions/result/classification'
-XAPI_ACTIVITY_TIMED_ASSESSMENT = 'https://w3id.org/xapi/openedx/activity/timed-assessment'
-XAPI_ACTIVITY_PRACTICE_ASSESSMENT = 'https://w3id.org/xapi/openedx/activity/practice-assessment'
-XAPI_ACTIVITY_PROCTORED_ASSESSMENT = 'https://w3id.org/xapi/openedx/activity/proctored-assessment'
-XAPI_ACTIVITY_PROGRESS = 'https://w3id.org/xapi/cmi5/result/extensions/progress'
+XAPI_ACTIVITY_QUESTION = "http://adlnet.gov/expapi/activities/question"
+XAPI_ACTIVITY_SOLUTION = "http://id.tincanapi.com/activitytype/solution"
+XAPI_ACTIVITY_RESOURCE = "http://id.tincanapi.com/activitytype/resource"
+XAPI_ACTIVITY_INTERACTION = "http://adlnet.gov/expapi/activities/cmi.interaction"
+XAPI_ACTIVITY_SUPPLEMENTAL_INFO = "https://w3id.org/xapi/acrossx/extensions/supplemental-info"
+XAPI_ACTIVITY_COURSE = "http://adlnet.gov/expapi/activities/course"
+XAPI_ACTIVITY_MODULE = "http://adlnet.gov/expapi/activities/module"
+XAPI_ACTIVITY_VIDEO = "https://w3id.org/xapi/video/activity-type/video"
+XAPI_ACTIVITY_DISCUSSION = "http://id.tincanapi.com/activitytype/discussion"
+XAPI_ACTIVITY_LINK = "http://adlnet.gov/expapi/activities/link"
+XAPI_ACTIVITY_POSITION = "http://id.tincanapi.com/extension/position"
+XAPI_ACTIVITY_TOTAL_COUNT = "https://w3id.org/xapi/acrossx/extensions/total-items"
+XAPI_ACTIVITY_MODE = "https://w3id.org/xapi/acrossx/extensions/type"
+XAPI_ACTIVITY_ATTEMPT = "http://id.tincanapi.com/extension/attempt-id"
+XAPI_ACTIVITY_GRADE_CLASSIFICATION = "http://www.tincanapi.co.uk/activitytypes/grade_classification"
+XAPI_ACTIVITY_GRADE = "http://www.tincanapi.co.uk/extensions/result/classification"
+XAPI_ACTIVITY_TIMED_ASSESSMENT = "https://w3id.org/xapi/openedx/activity/timed-assessment"
+XAPI_ACTIVITY_PRACTICE_ASSESSMENT = "https://w3id.org/xapi/openedx/activity/practice-assessment"
+XAPI_ACTIVITY_PROCTORED_ASSESSMENT = "https://w3id.org/xapi/openedx/activity/proctored-assessment"
+XAPI_ACTIVITY_PROGRESS = "https://w3id.org/xapi/cmi5/result/extensions/progress"
# xAPI context
-XAPI_CONTEXT_VIDEO_LENGTH = 'https://w3id.org/xapi/video/extensions/length'
-XAPI_CONTEXT_VIDEO_CC_LANGUAGE = 'https://w3id.org/xapi/video/extensions/cc-subtitle-lang'
-XAPI_CONTEXT_STARTING_POSITION = 'http://id.tincanapi.com/extension/starting-position'
-XAPI_CONTEXT_ENDING_POSITION = 'http://id.tincanapi.com/extension/ending-point'
-XAPI_CONTEXT_COMPLETION_THRESHOLD = 'https://w3id.org/xapi/video/extensions/completion-threshold'
-XAPI_CONTEXT_SESSION_ID = 'https://w3id.org/xapi/openedx/extensions/session-id'
+XAPI_CONTEXT_VIDEO_LENGTH = "https://w3id.org/xapi/video/extensions/length"
+XAPI_CONTEXT_VIDEO_CC_LANGUAGE = "https://w3id.org/xapi/video/extensions/cc-subtitle-lang"
+XAPI_CONTEXT_STARTING_POSITION = "http://id.tincanapi.com/extension/starting-position"
+XAPI_CONTEXT_ENDING_POSITION = "http://id.tincanapi.com/extension/ending-point"
+XAPI_CONTEXT_COMPLETION_THRESHOLD = "https://w3id.org/xapi/video/extensions/completion-threshold"
+XAPI_CONTEXT_SESSION_ID = "https://w3id.org/xapi/openedx/extensions/session-id"
-XAPI_ACTIVITY_TIME_LIMIT = 'https://w3id.org/xapi/acrossx/extensions/time-limit'
+XAPI_ACTIVITY_TIME_LIMIT = "https://w3id.org/xapi/acrossx/extensions/time-limit"
-XAPI_ACTIVITY_EXAM_ATTEMPT = 'http://adlnet.gov/expapi/activities/attempt'
+XAPI_ACTIVITY_EXAM_ATTEMPT = "http://adlnet.gov/expapi/activities/attempt"
-XAPI_CONTEXT_ATTEMPT_STARTED = 'https://w3id.org/xapi/openedx/extension/attempt-started'
-XAPI_CONTEXT_ATTEMPT_COMPLETED = 'https://w3id.org/xapi/openedx/extension/attempt-completed'
-XAPI_CONTEXT_DURATION = 'http://id.tincanapi.com/extension/duration'
-XAPI_CONTEXT_CODE = 'https://w3id.org/xapi/openedx/extension/code'
+XAPI_CONTEXT_ATTEMPT_STARTED = "https://w3id.org/xapi/openedx/extension/attempt-started"
+XAPI_CONTEXT_ATTEMPT_COMPLETED = "https://w3id.org/xapi/openedx/extension/attempt-completed"
+XAPI_CONTEXT_DURATION = "http://id.tincanapi.com/extension/duration"
+XAPI_CONTEXT_CODE = "https://w3id.org/xapi/openedx/extension/code"
# xAPI result
-XAPI_RESULT_VIDEO_TIME = 'https://w3id.org/xapi/video/extensions/time'
-XAPI_RESULT_VIDEO_TIME_FROM = 'https://w3id.org/xapi/video/extensions/time-from'
-XAPI_RESULT_VIDEO_TIME_TO = 'https://w3id.org/xapi/video/extensions/time-to'
-XAPI_RESULT_VIDEO_SPEED_FROM = 'https://w3id.org/xapi/openedx/extension/speed-from'
-XAPI_RESULT_VIDEO_SPEED_TO = 'https://w3id.org/xapi/openedx/extension/speed-to'
-XAPI_RESULT_VIDEO_CC_ENABLED = 'https://w3id.org/xapi/video/extensions/cc-enabled'
-XAPI_RESULT_VIDEO_PROGRESS = 'https://w3id.org/xapi/video/extensions/progress'
+XAPI_RESULT_VIDEO_TIME = "https://w3id.org/xapi/video/extensions/time"
+XAPI_RESULT_VIDEO_TIME_FROM = "https://w3id.org/xapi/video/extensions/time-from"
+XAPI_RESULT_VIDEO_TIME_TO = "https://w3id.org/xapi/video/extensions/time-to"
+XAPI_RESULT_VIDEO_SPEED_FROM = "https://w3id.org/xapi/openedx/extension/speed-from"
+XAPI_RESULT_VIDEO_SPEED_TO = "https://w3id.org/xapi/openedx/extension/speed-to"
+XAPI_RESULT_VIDEO_CC_ENABLED = "https://w3id.org/xapi/video/extensions/cc-enabled"
+XAPI_RESULT_VIDEO_PROGRESS = "https://w3id.org/xapi/video/extensions/progress"
# Every request from a Client and every response from the LRS includes an HTTP header
# with the name X-Experience-API-Version and the version as the value. This parameter contains
# xAPI specification version of the statements being pushed or pulled from LRS
# https://github.com/adlnet/xAPI-Spec/blob/master/xAPI-About.md#Appendix1A
-XAPI_SPECIFICATION_VERSION = '1.0.3'
-XAPI_TRANSFORMER_VERSION_KEY = 'https://w3id.org/xapi/openedx/extension/transformer-version'
+XAPI_SPECIFICATION_VERSION = "1.0.3"
+XAPI_TRANSFORMER_VERSION_KEY = "https://w3id.org/xapi/openedx/extension/transformer-version"
# Languages
-EN = 'en'
-EN_US = 'en-US'
+EN = "en"
+EN_US = "en-US"
# Display Names
-EXPERIENCED = 'experienced'
-INITIALIZED = 'initialized'
-REGISTERED = 'registered'
-UNREGISTERED = 'unregistered'
-ATTEMPTED = 'attempted'
-EVALUATED = 'evaluated'
-ANSWERED = 'answered'
-INTERACTED = 'interacted'
-PLAYED = 'played'
-PAUSED = 'paused'
-COMPLETED = 'completed'
-PASSED = 'passed'
-FAILED = 'failed'
-SEEKED = 'seeked'
-POSTED = 'posted'
-VIEWED = 'viewed'
-DELETED = 'deleted'
-EDITED = 'edited'
-VOTED = 'voted'
-REPORTED = 'reported'
-UNREPORTED = 'unreported'
-EARNED = 'earned'
-PROGRESSED = 'progressed'
+EXPERIENCED = "experienced"
+INITIALIZED = "initialized"
+REGISTERED = "registered"
+UNREGISTERED = "unregistered"
+ATTEMPTED = "attempted"
+EVALUATED = "evaluated"
+ANSWERED = "answered"
+INTERACTED = "interacted"
+PLAYED = "played"
+PAUSED = "paused"
+COMPLETED = "completed"
+PASSED = "passed"
+FAILED = "failed"
+SEEKED = "seeked"
+POSTED = "posted"
+VIEWED = "viewed"
+DELETED = "deleted"
+EDITED = "edited"
+VOTED = "voted"
+REPORTED = "reported"
+UNREPORTED = "unreported"
+EARNED = "earned"
+PROGRESSED = "progressed"
-TERMINATED = 'terminated'
-NAVIGATED = 'navigated'
-ASKED = 'asked'
+TERMINATED = "terminated"
+NAVIGATED = "navigated"
+ASKED = "asked"
# Open edX
-OPENEDX_OAUTH2_TOKEN_URL = '/oauth2/access_token'
-BLOCK_OBJECT_ID_FORMAT = '{platform}/xblock/{block_usage_key}'
-ENROLLMENT_API_URL_FORMAT = '/api/enrollment/v1/enrollment/{username},{course_id}'
+OPENEDX_OAUTH2_TOKEN_URL = "/oauth2/access_token"
+BLOCK_OBJECT_ID_FORMAT = "{platform}/xblock/{block_usage_key}"
+ENROLLMENT_API_URL_FORMAT = "/api/enrollment/v1/enrollment/{username},{course_id}"
diff --git a/event_routing_backends/processors/xapi/event_transformers/completion_events.py b/event_routing_backends/processors/xapi/event_transformers/completion_events.py
index 51917fdc..05904b63 100644
--- a/event_routing_backends/processors/xapi/event_transformers/completion_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/completion_events.py
@@ -1,6 +1,7 @@
"""
Transformers for forum related events.
"""
+
from tincan import Activity, ActivityDefinition, Extensions, LanguageMap, Result, Verb
from event_routing_backends.processors.openedx_filters.decorators import openedx_filter
@@ -20,7 +21,7 @@ class CompletionCreatedTransformer(XApiTransformer):
display=LanguageMap({constants.EN: constants.PROGRESSED}),
)
- additional_fields = ('result', )
+ additional_fields = ("result",)
@openedx_filter(
filter_type="event_routing_backends.processors.xapi.completion_events.completion_created.get_object",
@@ -48,7 +49,5 @@ def get_result(self):
"""
return Result(
completion=self.get_data("data.completion") == 1.0,
- extensions=Extensions(
- {constants.XAPI_ACTIVITY_PROGRESS: self.get_data("data.completion")*100}
- ),
+ extensions=Extensions({constants.XAPI_ACTIVITY_PROGRESS: self.get_data("data.completion") * 100}),
)
diff --git a/event_routing_backends/processors/xapi/event_transformers/enrollment_events.py b/event_routing_backends/processors/xapi/event_transformers/enrollment_events.py
index 9eaffd2e..98e57f0e 100644
--- a/event_routing_backends/processors/xapi/event_transformers/enrollment_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/enrollment_events.py
@@ -34,73 +34,76 @@ def get_object(self):
Returns:
`Activity`
"""
- course_id = self.get_data('context.course_id', True)
- object_id = self.get_object_iri('course', course_id)
+ course_id = self.get_data("context.course_id", True)
+ object_id = self.get_object_iri("course", course_id)
course = get_course_from_id(course_id)
- display_name = course['display_name']
+ display_name = course["display_name"]
return Activity(
id=object_id,
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_COURSE,
name=LanguageMap(**({constants.EN: display_name} if display_name is not None else {})),
- extensions=Extensions({
- constants.XAPI_ACTIVITY_MODE: self.get_data('data.mode')
- })
+ extensions=Extensions({constants.XAPI_ACTIVITY_MODE: self.get_data("data.mode")}),
),
)
-@XApiTransformersRegistry.register('edx.course.enrollment.activated')
-@XApiTransformersRegistry.register('edx.course.enrollment.mode_changed')
+@XApiTransformersRegistry.register("edx.course.enrollment.activated")
+@XApiTransformersRegistry.register("edx.course.enrollment.mode_changed")
class EnrollmentActivatedTransformer(BaseEnrollmentTransformer):
"""
Transformers for event generated when learner enrolls or gets the enrollment mode changed in a course.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_REGISTERED,
display=LanguageMap({constants.EN: constants.REGISTERED}),
)
-@XApiTransformersRegistry.register('edx.course.enrollment.deactivated')
+@XApiTransformersRegistry.register("edx.course.enrollment.deactivated")
class EnrollmentDeactivatedTransformer(BaseEnrollmentTransformer):
"""
Transformers for event generated when learner un-enrolls from a course.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_UNREGISTERED,
display=LanguageMap({constants.EN: constants.UNREGISTERED}),
)
-@XApiTransformersRegistry.register('edx.course.grade.passed.first_time')
+@XApiTransformersRegistry.register("edx.course.grade.passed.first_time")
class CourseGradePassedFirstTimeTransformer(BaseEnrollmentTransformer):
"""
Transformers for event generated when learner pass course grade first time from a course.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_PASSED,
display=LanguageMap({constants.EN: constants.PASSED}),
)
-@XApiTransformersRegistry.register('edx.course.grade.now_passed')
+@XApiTransformersRegistry.register("edx.course.grade.now_passed")
class CourseGradeNowPassedTransformer(BaseEnrollmentTransformer):
"""
Transformers for event generated when learner pass course grade first time from a course.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_PASSED,
display=LanguageMap({constants.EN: constants.PASSED}),
)
-@XApiTransformersRegistry.register('edx.course.grade.now_failed')
+@XApiTransformersRegistry.register("edx.course.grade.now_failed")
class CourseGradeNowFailedTransformer(BaseEnrollmentTransformer):
"""
Transformers for event generated when learner pass course grade first time from a course.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_FAILED,
display=LanguageMap({constants.EN: constants.FAILED}),
diff --git a/event_routing_backends/processors/xapi/event_transformers/exam_events.py b/event_routing_backends/processors/xapi/event_transformers/exam_events.py
index 57f61ef8..c646b505 100644
--- a/event_routing_backends/processors/xapi/event_transformers/exam_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/exam_events.py
@@ -33,11 +33,7 @@ def get_object(self):
type=self.exam_type_activity,
name=LanguageMap(**({constants.EN: self.get_data("event.exam_name")})),
extensions=Extensions(
- {
- constants.XAPI_ACTIVITY_TIME_LIMIT: self.get_data(
- "event.exam_default_time_limit_mins"
- )
- }
+ {constants.XAPI_ACTIVITY_TIME_LIMIT: self.get_data("event.exam_default_time_limit_mins")}
),
),
)
@@ -53,18 +49,10 @@ def get_context_activities(self):
name=LanguageMap({constants.EN: self.get_data("event.exam_name")}),
extensions=Extensions(
{
- constants.XAPI_CONTEXT_ATTEMPT_STARTED: self.get_data(
- "event.attempt_started_at"
- ),
- constants.XAPI_CONTEXT_ATTEMPT_COMPLETED: self.get_data(
- "event.attempt_completed_at"
- ),
- constants.XAPI_CONTEXT_DURATION: self.get_data(
- "event.attempt_event_elapsed_time_secs"
- ),
- constants.XAPI_ACTIVITY_ATTEMPT: self.get_data(
- "event.attempt_id"
- ),
+ constants.XAPI_CONTEXT_ATTEMPT_STARTED: self.get_data("event.attempt_started_at"),
+ constants.XAPI_CONTEXT_ATTEMPT_COMPLETED: self.get_data("event.attempt_completed_at"),
+ constants.XAPI_CONTEXT_DURATION: self.get_data("event.attempt_event_elapsed_time_secs"),
+ constants.XAPI_ACTIVITY_ATTEMPT: self.get_data("event.attempt_id"),
}
),
),
diff --git a/event_routing_backends/processors/xapi/event_transformers/forum_events.py b/event_routing_backends/processors/xapi/event_transformers/forum_events.py
index 0bbef139..840a0a35 100644
--- a/event_routing_backends/processors/xapi/event_transformers/forum_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/forum_events.py
@@ -1,6 +1,7 @@
"""
Transformers for forum related events.
"""
+
from django.conf import settings
from tincan import Activity, ActivityDefinition, LanguageMap, Verb
@@ -24,53 +25,50 @@ def get_object(self):
`Activity`
"""
- object_id = self.get_data('data.id', True)
- object_path = self.get_data('context.path', True).rstrip('/').replace(object_id, '').rstrip('/')
+ object_id = self.get_data("data.id", True)
+ object_path = self.get_data("context.path", True).rstrip("/").replace(object_id, "").rstrip("/")
kwargs = {}
- if self.get_data('data.title'):
- kwargs['name'] = LanguageMap({constants.EN: self.get_data('data.title')})
+ if self.get_data("data.title"):
+ kwargs["name"] = LanguageMap({constants.EN: self.get_data("data.title")})
return Activity(
- id='{lms_root_url}{object_path}/{object_id}'.format(
- lms_root_url=settings.LMS_ROOT_URL,
- object_path=object_path,
- object_id=object_id
- ),
- definition=ActivityDefinition(
- type=constants.XAPI_ACTIVITY_DISCUSSION,
- **kwargs
- )
+ id="{lms_root_url}{object_path}/{object_id}".format(
+ lms_root_url=settings.LMS_ROOT_URL,
+ object_path=object_path,
+ object_id=object_id,
+ ),
+ definition=ActivityDefinition(type=constants.XAPI_ACTIVITY_DISCUSSION, **kwargs),
)
def get_context_activities(self):
context_activities = super().get_context_activities()
- discussion = self.get_data('data.discussion.id')
+ discussion = self.get_data("data.discussion.id")
if not discussion:
return context_activities
context_activities.grouping = [
Activity(
- id='{lms_root_url}/api/discussion/v1/threads/{discussion_id}'.format(
- lms_root_url=settings.LMS_ROOT_URL,
- discussion_id=discussion
+ id="{lms_root_url}/api/discussion/v1/threads/{discussion_id}".format(
+ lms_root_url=settings.LMS_ROOT_URL, discussion_id=discussion
),
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_DISCUSSION,
- )
+ ),
)
]
return context_activities
-@XApiTransformersRegistry.register('edx.forum.thread.created')
+@XApiTransformersRegistry.register("edx.forum.thread.created")
class ThreadCreatedTransformer(BaseForumThreadTransformer):
"""
Transformers for event generated when learner creates a thread in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_POSTED,
display=LanguageMap({constants.EN: constants.POSTED}),
@@ -84,57 +82,59 @@ def get_context_extensions(self):
`Extensions`
"""
extensions = super().get_context_extensions()
- extensions.update({
- constants.XAPI_ACTIVITY_MODE: self.get_data('thread_type')
- })
+ extensions.update({constants.XAPI_ACTIVITY_MODE: self.get_data("thread_type")})
return extensions
-@XApiTransformersRegistry.register('edx.forum.thread.edited')
-@XApiTransformersRegistry.register('edx.forum.response.edited')
-@XApiTransformersRegistry.register('edx.forum.comment.edited')
+@XApiTransformersRegistry.register("edx.forum.thread.edited")
+@XApiTransformersRegistry.register("edx.forum.response.edited")
+@XApiTransformersRegistry.register("edx.forum.comment.edited")
class ThreadEditedTransformer(BaseForumThreadTransformer):
"""
Transformers for event generated when learner modifies a
thread/response/comment in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_EDITED,
display=LanguageMap({constants.EN: constants.EDITED}),
)
-@XApiTransformersRegistry.register('edx.forum.thread.viewed')
+@XApiTransformersRegistry.register("edx.forum.thread.viewed")
class ThreadViewedTransformer(BaseForumThreadTransformer):
"""
Transformers for event generated when learner viewes a thread in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_VIEWED,
display=LanguageMap({constants.EN: constants.VIEWED}),
)
-@XApiTransformersRegistry.register('edx.forum.thread.deleted')
-@XApiTransformersRegistry.register('edx.forum.response.deleted')
-@XApiTransformersRegistry.register('edx.forum.comment.deleted')
+@XApiTransformersRegistry.register("edx.forum.thread.deleted")
+@XApiTransformersRegistry.register("edx.forum.response.deleted")
+@XApiTransformersRegistry.register("edx.forum.comment.deleted")
class ThreadDeletedTransformer(BaseForumThreadTransformer):
"""
Transformers for event generated when learner deletes a
thread/response/comment in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_DELETED,
display=LanguageMap({constants.EN: constants.DELETED}),
)
-@XApiTransformersRegistry.register('edx.forum.thread.voted')
-@XApiTransformersRegistry.register('edx.forum.response.voted')
+@XApiTransformersRegistry.register("edx.forum.thread.voted")
+@XApiTransformersRegistry.register("edx.forum.response.voted")
class ThreadVotedTransformer(BaseForumThreadTransformer):
"""
Transformers for event generated when learner votes on a thread/response in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_VOTED,
display=LanguageMap({constants.EN: constants.VOTED}),
@@ -148,47 +148,48 @@ def get_context_extensions(self):
`Extensions`
"""
extensions = super().get_context_extensions()
- extensions.update({
- constants.XAPI_ACTIVITY_MODE: self.get_data('vote_value')
- })
+ extensions.update({constants.XAPI_ACTIVITY_MODE: self.get_data("vote_value")})
return extensions
-@XApiTransformersRegistry.register('edx.forum.response.created')
-@XApiTransformersRegistry.register('edx.forum.comment.created')
+@XApiTransformersRegistry.register("edx.forum.response.created")
+@XApiTransformersRegistry.register("edx.forum.comment.created")
class ThreadResponseCreatedTransformer(BaseForumThreadTransformer):
"""
Transformer for event generated when learner creates a response
or comment under a thread in discussion forum.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_POSTED,
display=LanguageMap({constants.EN: constants.POSTED}),
)
-@XApiTransformersRegistry.register('edx.forum.thread.reported')
-@XApiTransformersRegistry.register('edx.forum.response.reported')
-@XApiTransformersRegistry.register('edx.forum.comment.reported')
+@XApiTransformersRegistry.register("edx.forum.thread.reported")
+@XApiTransformersRegistry.register("edx.forum.response.reported")
+@XApiTransformersRegistry.register("edx.forum.comment.reported")
class ThreadResponseReportedTransformer(BaseForumThreadTransformer):
"""
Transformer for event generated when learner reports a thread,
response or comment as inappropriate.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_REPORTED,
display=LanguageMap({constants.EN: constants.REPORTED}),
)
-@XApiTransformersRegistry.register('edx.forum.thread.unreported')
-@XApiTransformersRegistry.register('edx.forum.response.unreported')
-@XApiTransformersRegistry.register('edx.forum.comment.unreported')
+@XApiTransformersRegistry.register("edx.forum.thread.unreported")
+@XApiTransformersRegistry.register("edx.forum.response.unreported")
+@XApiTransformersRegistry.register("edx.forum.comment.unreported")
class ThreadResponseUnReportedTransformer(BaseForumThreadTransformer):
"""
Transformer for event generated when learner unreports a thread,
response or comment which was earlier reported as inappropriate.
"""
+
_verb = Verb(
id=constants.XAPI_VERB_UNREPORTED,
display=LanguageMap({constants.EN: constants.UNREPORTED}),
diff --git a/event_routing_backends/processors/xapi/event_transformers/grading_events.py b/event_routing_backends/processors/xapi/event_transformers/grading_events.py
index 1dbc2afd..a528d8d0 100644
--- a/event_routing_backends/processors/xapi/event_transformers/grading_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/grading_events.py
@@ -1,6 +1,7 @@
"""
Transformers for grading related events.
"""
+
from tincan import Activity, ActivityDefinition, Extensions, LanguageMap, Result, Verb
from event_routing_backends.helpers import get_course_from_id
@@ -95,9 +96,7 @@ def get_object(self):
id=object_id,
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_COURSE,
- name=LanguageMap(
- **({constants.EN: display_name} if display_name is not None else {})
- ),
+ name=LanguageMap(**({constants.EN: display_name} if display_name is not None else {})),
),
)
@@ -121,9 +120,5 @@ def get_result(self):
"raw": weighted_earned,
"scaled": weighted_earned,
},
- extensions=Extensions(
- {
- constants.XAPI_ACTIVITY_GRADE_CLASSIFICATION: letter_grade
- }
- ),
+ extensions=Extensions({constants.XAPI_ACTIVITY_GRADE_CLASSIFICATION: letter_grade}),
)
diff --git a/event_routing_backends/processors/xapi/event_transformers/navigation_events.py b/event_routing_backends/processors/xapi/event_transformers/navigation_events.py
index d0460041..656e6c2a 100644
--- a/event_routing_backends/processors/xapi/event_transformers/navigation_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/navigation_events.py
@@ -1,6 +1,7 @@
"""
Transformers for navigation related events.
"""
+
from tincan import Activity, ActivityDefinition, Extensions, LanguageMap
from event_routing_backends.processors.openedx_filters.decorators import openedx_filter
@@ -9,30 +10,30 @@
from event_routing_backends.processors.xapi.transformer import XApiTransformer, XApiVerbTransformerMixin
VERB_MAP = {
- 'edx.ui.lms.sequence.next_selected': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
+ "edx.ui.lms.sequence.next_selected": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
+ },
+ "edx.ui.lms.sequence.previous_selected": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
},
- 'edx.ui.lms.sequence.previous_selected': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
+ "edx.ui.lms.sequence.tab_selected": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
},
- 'edx.ui.lms.sequence.tab_selected': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
+ "edx.ui.lms.link_clicked": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
},
- 'edx.ui.lms.link_clicked': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
+ "edx.ui.lms.sequence.outline.selected": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
},
- 'edx.ui.lms.sequence.outline.selected': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
+ "edx.ui.lms.outline.selected": {
+ "id": constants.XAPI_VERB_NAVIGATED,
+ "display": constants.NAVIGATED,
},
- 'edx.ui.lms.outline.selected': {
- 'id': constants.XAPI_VERB_NAVIGATED,
- 'display': constants.NAVIGATED
- }
}
@@ -42,10 +43,11 @@ class NavigationTransformersMixin(XApiTransformer, XApiVerbTransformerMixin):
This class has the common attributes for all navigation events.
"""
+
verb_map = VERB_MAP
-@XApiTransformersRegistry.register('edx.ui.lms.link_clicked')
+@XApiTransformersRegistry.register("edx.ui.lms.link_clicked")
class LinkClickedTransformer(NavigationTransformersMixin):
"""
xAPI transformer for event generated when user clicks a link.
@@ -60,15 +62,13 @@ def get_object(self):
`Activity`
"""
return Activity(
- id=self.get_data('data.target_url', True),
- definition=ActivityDefinition(
- type=constants.XAPI_ACTIVITY_LINK
- ),
+ id=self.get_data("data.target_url", True),
+ definition=ActivityDefinition(type=constants.XAPI_ACTIVITY_LINK),
)
-@XApiTransformersRegistry.register('edx.ui.lms.sequence.outline.selected')
-@XApiTransformersRegistry.register('edx.ui.lms.outline.selected')
+@XApiTransformersRegistry.register("edx.ui.lms.sequence.outline.selected")
+@XApiTransformersRegistry.register("edx.ui.lms.outline.selected")
class OutlineSelectedTransformer(NavigationTransformersMixin):
"""
xAPI transformer for Navigation events.
@@ -83,17 +83,17 @@ def get_object(self):
`Activity`
"""
return Activity(
- id=self.get_data('data.target_url'),
+ id=self.get_data("data.target_url"),
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_MODULE,
- name=LanguageMap({constants.EN: self.get_data('data.target_name')})
+ name=LanguageMap({constants.EN: self.get_data("data.target_name")}),
),
)
-@XApiTransformersRegistry.register('edx.ui.lms.sequence.next_selected')
-@XApiTransformersRegistry.register('edx.ui.lms.sequence.previous_selected')
-@XApiTransformersRegistry.register('edx.ui.lms.sequence.tab_selected')
+@XApiTransformersRegistry.register("edx.ui.lms.sequence.next_selected")
+@XApiTransformersRegistry.register("edx.ui.lms.sequence.previous_selected")
+@XApiTransformersRegistry.register("edx.ui.lms.sequence.tab_selected")
class TabNavigationTransformer(NavigationTransformersMixin):
"""
xAPI transformer for Navigation events.
@@ -108,12 +108,10 @@ def get_object(self):
`Activity`
"""
return Activity(
- id=self.get_object_iri('xblock', self.get_data('data.id')),
+ id=self.get_object_iri("xblock", self.get_data("data.id")),
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_RESOURCE,
- extensions=Extensions({
- constants.XAPI_ACTIVITY_TOTAL_COUNT: self.get_data('data.tab_count')
- })
+ extensions=Extensions({constants.XAPI_ACTIVITY_TOTAL_COUNT: self.get_data("data.tab_count")}),
),
)
@@ -125,21 +123,27 @@ def get_context_extensions(self):
`Extensions`
"""
extensions = super().get_context_extensions()
- event_name = self.get_data('name', True)
- if event_name == 'edx.ui.lms.sequence.tab_selected':
- extensions.update({
- constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data('data.current_tab'),
- constants.XAPI_CONTEXT_ENDING_POSITION: self.get_data('data.target_tab'),
- })
- elif event_name == 'edx.ui.lms.sequence.next_selected':
- extensions.update({
- constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data('data.current_tab'),
- constants.XAPI_CONTEXT_ENDING_POSITION: 'next unit',
- })
+ event_name = self.get_data("name", True)
+ if event_name == "edx.ui.lms.sequence.tab_selected":
+ extensions.update(
+ {
+ constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data("data.current_tab"),
+ constants.XAPI_CONTEXT_ENDING_POSITION: self.get_data("data.target_tab"),
+ }
+ )
+ elif event_name == "edx.ui.lms.sequence.next_selected":
+ extensions.update(
+ {
+ constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data("data.current_tab"),
+ constants.XAPI_CONTEXT_ENDING_POSITION: "next unit",
+ }
+ )
else:
- extensions.update({
- constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data('data.current_tab'),
- constants.XAPI_CONTEXT_ENDING_POSITION: 'previous unit',
- })
+ extensions.update(
+ {
+ constants.XAPI_CONTEXT_STARTING_POSITION: self.get_data("data.current_tab"),
+ constants.XAPI_CONTEXT_ENDING_POSITION: "previous unit",
+ }
+ )
return extensions
diff --git a/event_routing_backends/processors/xapi/event_transformers/problem_interaction_events.py b/event_routing_backends/processors/xapi/event_transformers/problem_interaction_events.py
index b203a931..653d3560 100644
--- a/event_routing_backends/processors/xapi/event_transformers/problem_interaction_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/problem_interaction_events.py
@@ -1,6 +1,7 @@
"""
Transformers for problem interaction events.
"""
+
import json
from tincan import Activity, ActivityDefinition, Extensions, LanguageMap, Result
@@ -19,56 +20,53 @@
# map open edx problems interation types to xAPI valid interaction types
INTERACTION_TYPES_MAP = {
- 'choiceresponse': 'choice',
- 'multiplechoiceresponse': 'choice',
- 'numericalresponse': 'numeric',
- 'stringresponse': 'fill-in',
- 'customresponse': 'other',
- 'coderesponse': 'other',
- 'externalresponse': 'other',
- 'formularesponse': 'fill-in',
- 'schematicresponse': 'other',
- 'imageresponse': 'matching',
- 'annotationresponse': 'fill-in',
- 'choicetextresponse': 'choice',
- 'optionresponse': 'choice',
- 'symbolicresponse': 'fill-in',
- 'truefalseresponse': 'true-false',
- 'non_existent': 'other',
+ "choiceresponse": "choice",
+ "multiplechoiceresponse": "choice",
+ "numericalresponse": "numeric",
+ "stringresponse": "fill-in",
+ "customresponse": "other",
+ "coderesponse": "other",
+ "externalresponse": "other",
+ "formularesponse": "fill-in",
+ "schematicresponse": "other",
+ "imageresponse": "matching",
+ "annotationresponse": "fill-in",
+ "choicetextresponse": "choice",
+ "optionresponse": "choice",
+ "symbolicresponse": "fill-in",
+ "truefalseresponse": "true-false",
+ "non_existent": "other",
}
EVENT_OBJECT_DEFINITION_TYPE = {
- 'edx.grades.problem.submitted': constants.XAPI_ACTIVITY_QUESTION,
- 'showanswer': constants.XAPI_ACTIVITY_SOLUTION,
- 'edx.problem.hint.demandhint_displayed': constants.XAPI_ACTIVITY_SUPPLEMENTAL_INFO,
+ "edx.grades.problem.submitted": constants.XAPI_ACTIVITY_QUESTION,
+ "showanswer": constants.XAPI_ACTIVITY_SOLUTION,
+ "edx.problem.hint.demandhint_displayed": constants.XAPI_ACTIVITY_SUPPLEMENTAL_INFO,
}
-DEFAULT_INTERACTION_TYPE = 'other'
+DEFAULT_INTERACTION_TYPE = "other"
VERB_MAP = {
- 'edx.grades.problem.submitted': {
- 'id': constants.XAPI_VERB_ATTEMPTED,
- 'display': constants.ATTEMPTED
- },
- 'problem_check_browser': {
- 'id': constants.XAPI_VERB_ATTEMPTED,
- 'display': constants.ATTEMPTED
+ "edx.grades.problem.submitted": {
+ "id": constants.XAPI_VERB_ATTEMPTED,
+ "display": constants.ATTEMPTED,
},
- 'problem_check': {
- 'id': constants.XAPI_VERB_EVALUATED,
- 'display': constants.EVALUATED
+ "problem_check_browser": {
+ "id": constants.XAPI_VERB_ATTEMPTED,
+ "display": constants.ATTEMPTED,
},
- 'showanswer': {
- 'id': constants.XAPI_VERB_ASKED,
- 'display': constants.ASKED
+ "problem_check": {
+ "id": constants.XAPI_VERB_EVALUATED,
+ "display": constants.EVALUATED,
},
- 'edx.problem.hint.demandhint_displayed': {
- 'id': constants.XAPI_VERB_ASKED,
- 'display': constants.ASKED
+ "showanswer": {"id": constants.XAPI_VERB_ASKED, "display": constants.ASKED},
+ "edx.problem.hint.demandhint_displayed": {
+ "id": constants.XAPI_VERB_ASKED,
+ "display": constants.ASKED,
},
- 'edx.problem.completed': {
- 'id': constants.XAPI_VERB_COMPLETED,
- 'display': constants.COMPLETED
+ "edx.problem.completed": {
+ "id": constants.XAPI_VERB_COMPLETED,
+ "display": constants.COMPLETED,
},
}
@@ -77,6 +75,7 @@ class BaseProblemsTransformer(XApiTransformer, XApiVerbTransformerMixin):
"""
Base Transformer for problem interaction events.
"""
+
verb_map = VERB_MAP
@openedx_filter(
@@ -103,11 +102,11 @@ def get_object_id(self):
str
"""
object_id = None
- data = self.get_data('data')
+ data = self.get_data("data")
if data and isinstance(data, dict):
- object_id = self.get_data('data.problem_id') or self.get_data('data.module_id', True)
+ object_id = self.get_data("data.problem_id") or self.get_data("data.module_id", True)
else:
- object_id = self.get_data('usage_key')
+ object_id = self.get_data("usage_key")
return object_id
@@ -118,17 +117,20 @@ def get_object_definition(self):
Returns:
ActivityDefinition
"""
- event_name = self.get_data('name', True)
+ event_name = self.get_data("name", True)
return ActivityDefinition(
- type=EVENT_OBJECT_DEFINITION_TYPE[event_name] if event_name in EVENT_OBJECT_DEFINITION_TYPE else
- constants.XAPI_ACTIVITY_INTERACTION,
+ type=(
+ EVENT_OBJECT_DEFINITION_TYPE[event_name]
+ if event_name in EVENT_OBJECT_DEFINITION_TYPE
+ else constants.XAPI_ACTIVITY_INTERACTION
+ ),
)
-@XApiTransformersRegistry.register('showanswer')
-@XApiTransformersRegistry.register('edx.problem.completed')
-@XApiTransformersRegistry.register('edx.problem.hint.demandhint_displayed')
+@XApiTransformersRegistry.register("showanswer")
+@XApiTransformersRegistry.register("edx.problem.completed")
+@XApiTransformersRegistry.register("edx.problem.hint.demandhint_displayed")
class ProblemEventsTransformer(BaseProblemsTransformer):
"""
Transform problem interaction events into xAPI format.
@@ -142,29 +144,29 @@ def get_object(self):
`Activity`
"""
xapi_object = super().get_object()
- event_name = self.get_data('name', True)
- if event_name == 'showanswer':
- problem_id = self.get_data('problem_id', True)
- xapi_object.id = '{iri}/answer'.format(
- iri=self.get_object_iri('xblock', problem_id),
+ event_name = self.get_data("name", True)
+ if event_name == "showanswer":
+ problem_id = self.get_data("problem_id", True)
+ xapi_object.id = "{iri}/answer".format(
+ iri=self.get_object_iri("xblock", problem_id),
)
- if event_name == 'edx.problem.hint.demandhint_displayed':
- module_id = self.get_data('module_id', True)
- hint_index = self.get_data('hint_index', True)
- xapi_object.id = '{iri}/hint/{hint_index}'.format(
- iri=self.get_object_iri('xblock', module_id),
- hint_index=hint_index
+ if event_name == "edx.problem.hint.demandhint_displayed":
+ module_id = self.get_data("module_id", True)
+ hint_index = self.get_data("hint_index", True)
+ xapi_object.id = "{iri}/hint/{hint_index}".format(
+ iri=self.get_object_iri("xblock", module_id), hint_index=hint_index
)
return xapi_object
-@XApiTransformersRegistry.register('edx.grades.problem.submitted')
+@XApiTransformersRegistry.register("edx.grades.problem.submitted")
class ProblemSubmittedTransformer(BaseProblemsTransformer):
"""
Transform problem interaction related events into xAPI format.
"""
- additional_fields = ('result', )
+
+ additional_fields = ("result",)
def get_object(self):
"""
@@ -174,7 +176,7 @@ def get_object(self):
`Activity`
"""
xapi_object = super().get_object()
- xapi_object.id = self.get_object_iri('xblock', xapi_object.id)
+ xapi_object.id = self.get_object_iri("xblock", xapi_object.id)
return xapi_object
def get_result(self):
@@ -184,22 +186,22 @@ def get_result(self):
Returns:
`Result`
"""
- event_data = self.get_data('data')
- weighted_possible = event_data['weighted_possible'] or 0
- weighted_earned = event_data['weighted_earned'] or 0
+ event_data = self.get_data("data")
+ weighted_possible = event_data["weighted_possible"] or 0
+ weighted_earned = event_data["weighted_earned"] or 0
if weighted_possible > 0:
- scaled = weighted_earned/weighted_possible
+ scaled = weighted_earned / weighted_possible
else:
scaled = 0
return Result(
success=weighted_earned >= weighted_possible,
score={
- 'min': 0,
- 'max': weighted_possible,
- 'raw': weighted_earned,
- 'scaled': scaled
- }
+ "min": 0,
+ "max": weighted_possible,
+ "raw": weighted_earned,
+ "scaled": scaled,
+ },
)
@@ -214,7 +216,8 @@ class BaseProblemCheckTransformer(BaseProblemsTransformer):
* 1 parent GroupActivity
* N "child" Activity which reference the parent, where N>=0
"""
- additional_fields = ('result', )
+
+ additional_fields = ("result",)
@openedx_filter(
filter_type="event_routing_backends.processors.xapi.problem_interaction_events.base_problem_check.get_object",
@@ -230,19 +233,15 @@ def get_object(self):
# If the event was generated from browser, there is no `problem_id`
# or `module_id` field. Therefore we get block id from the referrer.
- event_source = self.get_data('context.event_source') or self.get_data('event_source')
- referer = self.get_data('referer') or self.get_data('context.referer', True)
- if event_source == 'browser':
- block_id = get_problem_block_id(
- referer,
- self.get_data('data'),
- self.get_data('context.course_id')
- )
- xapi_object.id = self.get_object_iri('xblock', block_id)
+ event_source = self.get_data("context.event_source") or self.get_data("event_source")
+ referer = self.get_data("referer") or self.get_data("context.referer", True)
+ if event_source == "browser":
+ block_id = get_problem_block_id(referer, self.get_data("data"), self.get_data("context.course_id"))
+ xapi_object.id = self.get_object_iri("xblock", block_id)
return xapi_object
else:
if xapi_object.id:
- xapi_object.id = self.get_object_iri('xblock', xapi_object.id)
+ xapi_object.id = self.get_object_iri("xblock", xapi_object.id)
return xapi_object
@@ -255,16 +254,14 @@ def get_object_definition(self):
"""
definition = super().get_object_definition()
- if self.get_data('data.attempts'):
- definition.extensions = Extensions({
- constants.XAPI_ACTIVITY_ATTEMPT: self.get_data('data.attempts')
- })
+ if self.get_data("data.attempts"):
+ definition.extensions = Extensions({constants.XAPI_ACTIVITY_ATTEMPT: self.get_data("data.attempts")})
interaction_type = self._get_interaction_type()
- display_name = self.get_data('display_name')
+ display_name = self.get_data("display_name")
submission = self._get_submission()
if submission:
- interaction_type = INTERACTION_TYPES_MAP.get(submission.get('response_type'), DEFAULT_INTERACTION_TYPE)
- definition.description = LanguageMap({constants.EN_US: submission['question']})
+ interaction_type = INTERACTION_TYPES_MAP.get(submission.get("response_type"), DEFAULT_INTERACTION_TYPE)
+ definition.description = LanguageMap({constants.EN_US: submission["question"]})
elif display_name:
definition.name = LanguageMap({constants.EN_US: display_name})
@@ -279,10 +276,10 @@ def _get_submission(self):
Returns:
dict
"""
- submissions = self.get_data('data.submission')
+ submissions = self.get_data("data.submission")
if submissions:
for sub_id in submissions:
- if 'response_type' in submissions[sub_id] and submissions[sub_id]['response_type']:
+ if "response_type" in submissions[sub_id] and submissions[sub_id]["response_type"]:
return submissions[sub_id]
return None
@@ -297,7 +294,7 @@ def _get_interaction_type(self):
Returns:
str
"""
- response_type = self.get_data('response_type')
+ response_type = self.get_data("response_type")
try:
return INTERACTION_TYPES_MAP[response_type]
except KeyError:
@@ -311,11 +308,11 @@ def get_result(self):
Result
"""
# Do not transform result if the event is generated from browser
- source = self.get_data('event_source') or self.get_data('context.event_source')
- if source == 'browser':
+ source = self.get_data("event_source") or self.get_data("context.event_source")
+ if source == "browser":
return None
- event_data = self.get_data('data')
+ event_data = self.get_data("data")
if event_data is None:
event_data = {}
@@ -333,11 +330,11 @@ def get_result(self):
# parsing is. Should we ever find it necessary to make a better
# parser for them, Insights had a good effort here:
# https://github.com/openedx/edx-analytics-pipeline/blob/8d96f93/edx/analytics/tasks/insights/answer_dist.py#L260C36-L260C36
- response = event_data.get('answers', None)
- correct = self.get_data('success') == 'correct'
+ response = event_data.get("answers", None)
+ correct = self.get_data("success") == "correct"
- max_grade = self.get_data('max_grade')
- grade = self.get_data('grade')
+ max_grade = self.get_data("max_grade")
+ grade = self.get_data("grade")
scaled = None
if max_grade is not None and grade is not None:
@@ -359,16 +356,16 @@ def get_result(self):
return cls(
success=correct,
score={
- 'min': 0,
- 'max': max_grade,
- 'raw': grade,
- 'scaled': scaled,
+ "min": 0,
+ "max": max_grade,
+ "raw": grade,
+ "scaled": scaled,
},
- response=response
+ response=response,
)
-@XApiTransformersRegistry.register('problem_check')
+@XApiTransformersRegistry.register("problem_check")
class ProblemCheckTransformer(OneToManyXApiTransformerMixin, BaseProblemCheckTransformer):
"""
Transform problem check events into one or more xAPI statements.
@@ -380,6 +377,7 @@ class ProblemCheckTransformer(OneToManyXApiTransformerMixin, BaseProblemCheckTra
* 1 parent GroupActivity
* N "child" Activity which reference the parent, where N>=0
"""
+
@property
def child_transformer_class(self):
"""
@@ -402,7 +400,7 @@ def get_child_ids(self):
Returns:
list of strings
"""
- submissions = self.get_data('submission') or {}
+ submissions = self.get_data("submission") or {}
child_ids = submissions.keys()
if len(child_ids) > 1:
return child_ids
@@ -445,6 +443,7 @@ class ProblemCheckChildTransformer(OneToManyChildXApiTransformerMixin, BaseProbl
"""
Transformer for subproblems of a multi-question problem_check event.
"""
+
def _get_submission(self):
"""
Return this child's submission data from the event data, if valid.
@@ -452,7 +451,7 @@ def _get_submission(self):
Returns:
dict
"""
- submissions = self.get_data('submission') or {}
+ submissions = self.get_data("submission") or {}
return submissions.get(self.child_id)
def get_object_id(self):
@@ -464,10 +463,12 @@ def get_object_id(self):
str
"""
object_id = super().get_object_id() or ""
- object_id = '@'.join([
- *object_id.split('@')[:-1],
- self.child_id,
- ])
+ object_id = "@".join(
+ [
+ *object_id.split("@")[:-1],
+ self.child_id,
+ ]
+ )
return object_id
def get_result(self):
@@ -481,7 +482,7 @@ def get_result(self):
# Don't report the score on child events; only the parent knows the score.
result.score = None
submission = self._get_submission() or {}
- result.response = submission.get('answer')
+ result.response = submission.get("answer")
return result
@@ -491,6 +492,7 @@ class JSONEncodedResult(Result):
in for a `response` to str. This breaks our ability to serialize list
responses into JSON, so we override it here.
"""
+
@property
def response(self):
"""Response for Result
diff --git a/event_routing_backends/processors/xapi/event_transformers/video_events.py b/event_routing_backends/processors/xapi/event_transformers/video_events.py
index 6341dee4..e8cf6001 100644
--- a/event_routing_backends/processors/xapi/event_transformers/video_events.py
+++ b/event_routing_backends/processors/xapi/event_transformers/video_events.py
@@ -41,89 +41,74 @@
from event_routing_backends.processors.xapi.transformer import XApiTransformer, XApiVerbTransformerMixin
VERB_MAP = {
- 'load_video': {
- 'id': constants.XAPI_VERB_INITIALIZED,
- 'display': constants.INITIALIZED
+ "load_video": {
+ "id": constants.XAPI_VERB_INITIALIZED,
+ "display": constants.INITIALIZED,
},
- 'edx.video.loaded': {
- 'id': constants.XAPI_VERB_INITIALIZED,
- 'display': constants.INITIALIZED
+ "edx.video.loaded": {
+ "id": constants.XAPI_VERB_INITIALIZED,
+ "display": constants.INITIALIZED,
},
- 'play_video': {
- 'id': constants.XAPI_VERB_PLAYED,
- 'display': constants.PLAYED
+ "play_video": {"id": constants.XAPI_VERB_PLAYED, "display": constants.PLAYED},
+ "edx.video.played": {"id": constants.XAPI_VERB_PLAYED, "display": constants.PLAYED},
+ "stop_video": {
+ "id": constants.XAPI_VERB_TERMINATED,
+ "display": constants.TERMINATED,
},
- 'edx.video.played': {
- 'id': constants.XAPI_VERB_PLAYED,
- 'display': constants.PLAYED
+ "edx.video.stopped": {
+ "id": constants.XAPI_VERB_TERMINATED,
+ "display": constants.TERMINATED,
},
- 'stop_video': {
- 'id': constants.XAPI_VERB_TERMINATED,
- 'display': constants.TERMINATED
+ "complete_video": {
+ "id": constants.XAPI_VERB_COMPLETED,
+ "display": constants.COMPLETED,
},
- 'edx.video.stopped': {
- 'id': constants.XAPI_VERB_TERMINATED,
- 'display': constants.TERMINATED
+ "edx.video.completed": {
+ "id": constants.XAPI_VERB_COMPLETED,
+ "display": constants.COMPLETED,
},
- 'complete_video': {
- 'id': constants.XAPI_VERB_COMPLETED,
- 'display': constants.COMPLETED
+ "pause_video": {"id": constants.XAPI_VERB_PAUSED, "display": constants.PAUSED},
+ "edx.video.paused": {"id": constants.XAPI_VERB_PAUSED, "display": constants.PAUSED},
+ "seek_video": {"id": constants.XAPI_VERB_SEEKED, "display": constants.SEEKED},
+ "edx.video.position.changed": {
+ "id": constants.XAPI_VERB_SEEKED,
+ "display": constants.SEEKED,
},
- 'edx.video.completed': {
- 'id': constants.XAPI_VERB_COMPLETED,
- 'display': constants.COMPLETED
+ "hide_transcript": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'pause_video': {
- 'id': constants.XAPI_VERB_PAUSED,
- 'display': constants.PAUSED
+ "edx.video.transcript.hidden": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'edx.video.paused': {
- 'id': constants.XAPI_VERB_PAUSED,
- 'display': constants.PAUSED
+ "show_transcript": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'seek_video': {
- 'id': constants.XAPI_VERB_SEEKED,
- 'display': constants.SEEKED
+ "edx.video.transcript.shown": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'edx.video.position.changed': {
- 'id': constants.XAPI_VERB_SEEKED,
- 'display': constants.SEEKED
+ "speed_change_video": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'hide_transcript': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
+ "video_hide_cc_menu": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'edx.video.transcript.hidden': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
+ "video_show_cc_menu": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'show_transcript': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
+ "edx.video.closed_captions.hidden": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
- 'edx.video.transcript.shown': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
- },
- 'speed_change_video': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
- },
- 'video_hide_cc_menu': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
- },
- 'video_show_cc_menu': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
- },
- 'edx.video.closed_captions.hidden': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
- },
- 'edx.video.closed_captions.shown': {
- 'id': constants.XAPI_VERB_INTERACTED,
- 'display': constants.INTERACTED
+ "edx.video.closed_captions.shown": {
+ "id": constants.XAPI_VERB_INTERACTED,
+ "display": constants.INTERACTED,
},
}
@@ -132,6 +117,7 @@ class BaseVideoTransformer(XApiTransformer, XApiVerbTransformerMixin):
"""
Base transformer for video interaction events.
"""
+
verb_map = VERB_MAP
@openedx_filter(filter_type="event_routing_backends.processors.xapi.video_events.base_video.get_object")
@@ -142,15 +128,12 @@ def get_object(self):
Returns:
`Activity`
"""
- course_id = self.get_data('context.course_id', True)
- video_id = self.get_data('data.id', True)
+ course_id = self.get_data("context.course_id", True)
+ video_id = self.get_data("data.id", True)
object_id = make_video_block_id(course_id=course_id, video_id=video_id)
return Activity(
- id='{lms_root_url}/xblock/{object_id}'.format(
- lms_root_url=settings.LMS_ROOT_URL,
- object_id=object_id
- ),
+ id="{lms_root_url}/xblock/{object_id}".format(lms_root_url=settings.LMS_ROOT_URL, object_id=object_id),
definition=ActivityDefinition(
type=constants.XAPI_ACTIVITY_VIDEO,
# TODO: Add video's display name
@@ -166,32 +149,35 @@ def get_context_extensions(self):
"""
extensions = super().get_context_extensions()
- extensions.update({
+ extensions.update(
+ {
# TODO: Add completion threshold once its added in the platform.
- constants.XAPI_CONTEXT_VIDEO_LENGTH: convert_seconds_to_float(self.get_data('data.duration'))
- })
+ constants.XAPI_CONTEXT_VIDEO_LENGTH: convert_seconds_to_float(self.get_data("data.duration"))
+ }
+ )
return extensions
-@XApiTransformersRegistry.register('load_video')
-@XApiTransformersRegistry.register('edx.video.loaded')
+@XApiTransformersRegistry.register("load_video")
+@XApiTransformersRegistry.register("edx.video.loaded")
class VideoLoadedTransformer(BaseVideoTransformer):
"""
Transformer for the event generated when a video is loaded in the browser.
"""
-@XApiTransformersRegistry.register('play_video')
-@XApiTransformersRegistry.register('edx.video.played')
-@XApiTransformersRegistry.register('stop_video')
-@XApiTransformersRegistry.register('edx.video.stopped')
-@XApiTransformersRegistry.register('pause_video')
-@XApiTransformersRegistry.register('edx.video.paused')
+@XApiTransformersRegistry.register("play_video")
+@XApiTransformersRegistry.register("edx.video.played")
+@XApiTransformersRegistry.register("stop_video")
+@XApiTransformersRegistry.register("edx.video.stopped")
+@XApiTransformersRegistry.register("pause_video")
+@XApiTransformersRegistry.register("edx.video.paused")
class VideoInteractionTransformers(BaseVideoTransformer):
"""
Transformer for the events generated when learner interacts with the video.
"""
- additional_fields = BaseVideoTransformer.additional_fields + ('result', )
+
+ additional_fields = BaseVideoTransformer.additional_fields + ("result",)
def get_result(self):
"""
@@ -200,27 +186,24 @@ def get_result(self):
Returns:
`Result`
"""
- current_time = self.get_data('data.current_time') or self.get_data('data.currentTime')
- return Result(
- extensions=Extensions({
- constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(current_time)
- })
- )
-
-
-@XApiTransformersRegistry.register('hide_transcript')
-@XApiTransformersRegistry.register('video_hide_cc_menu')
-@XApiTransformersRegistry.register('edx.video.transcript.hidden')
-@XApiTransformersRegistry.register('show_transcript')
-@XApiTransformersRegistry.register('edx.video.transcript.shown')
-@XApiTransformersRegistry.register('edx.video.closed_captions.hidden')
-@XApiTransformersRegistry.register('edx.video.closed_captions.shown')
-@XApiTransformersRegistry.register('video_show_cc_menu')
+ current_time = self.get_data("data.current_time") or self.get_data("data.currentTime")
+ return Result(extensions=Extensions({constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(current_time)}))
+
+
+@XApiTransformersRegistry.register("hide_transcript")
+@XApiTransformersRegistry.register("video_hide_cc_menu")
+@XApiTransformersRegistry.register("edx.video.transcript.hidden")
+@XApiTransformersRegistry.register("show_transcript")
+@XApiTransformersRegistry.register("edx.video.transcript.shown")
+@XApiTransformersRegistry.register("edx.video.closed_captions.hidden")
+@XApiTransformersRegistry.register("edx.video.closed_captions.shown")
+@XApiTransformersRegistry.register("video_show_cc_menu")
class VideoCCTransformers(BaseVideoTransformer):
"""
Transformer for the events generated when CC enabled/disabled on videos
"""
- additional_fields = BaseVideoTransformer.additional_fields + ('result', )
+
+ additional_fields = BaseVideoTransformer.additional_fields + ("result",)
def get_result(self):
"""
@@ -229,32 +212,36 @@ def get_result(self):
Returns:
`Result`
"""
- event_name = self.get_data('name')
+ event_name = self.get_data("name")
cc_enabled = bool(
- event_name in [
- 'edx.video.closed_captions.shown',
- 'edx.video.transcript.shown',
- 'video_show_cc_menu',
- 'show_transcript',
+ event_name
+ in [
+ "edx.video.closed_captions.shown",
+ "edx.video.transcript.shown",
+ "video_show_cc_menu",
+ "show_transcript",
]
- )
- current_time = self.get_data('data.current_time') or self.get_data('data.currentTime')
+ )
+ current_time = self.get_data("data.current_time") or self.get_data("data.currentTime")
return Result(
- extensions=Extensions({
- constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(current_time),
- constants.XAPI_RESULT_VIDEO_CC_ENABLED: cc_enabled
- })
+ extensions=Extensions(
+ {
+ constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(current_time),
+ constants.XAPI_RESULT_VIDEO_CC_ENABLED: cc_enabled,
+ }
+ )
)
-@XApiTransformersRegistry.register('edx.video.completed')
-@XApiTransformersRegistry.register('complete_video')
+@XApiTransformersRegistry.register("edx.video.completed")
+@XApiTransformersRegistry.register("complete_video")
class VideoCompletedTransformer(BaseVideoTransformer):
"""
Transformer for the events generated when learner completes any video.
"""
- additional_fields = BaseVideoTransformer.additional_fields + ('result', )
+
+ additional_fields = BaseVideoTransformer.additional_fields + ("result",)
def get_result(self):
"""
@@ -264,21 +251,22 @@ def get_result(self):
`Result`
"""
return Result(
- extensions=Extensions({
- constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(self.get_data('data.duration'))
- }),
+ extensions=Extensions(
+ {constants.XAPI_RESULT_VIDEO_TIME: convert_seconds_to_float(self.get_data("data.duration"))}
+ ),
completion=True,
- duration=convert_seconds_to_float(self.get_data('data.duration'))
+ duration=convert_seconds_to_float(self.get_data("data.duration")),
)
-@XApiTransformersRegistry.register('seek_video')
-@XApiTransformersRegistry.register('edx.video.position.changed')
+@XApiTransformersRegistry.register("seek_video")
+@XApiTransformersRegistry.register("edx.video.position.changed")
class VideoPositionChangedTransformer(BaseVideoTransformer):
"""
Transformer for the events generated when changes the position of any video.
"""
- additional_fields = BaseVideoTransformer.additional_fields + ('result', )
+
+ additional_fields = BaseVideoTransformer.additional_fields + ("result",)
def get_result(self):
"""
@@ -288,19 +276,22 @@ def get_result(self):
`Result`
"""
return Result(
- extensions=Extensions({
- constants.XAPI_RESULT_VIDEO_TIME_FROM: convert_seconds_to_float(self.get_data('data.old_time')),
- constants.XAPI_RESULT_VIDEO_TIME_TO: convert_seconds_to_float(self.get_data('data.new_time')),
- }),
+ extensions=Extensions(
+ {
+ constants.XAPI_RESULT_VIDEO_TIME_FROM: convert_seconds_to_float(self.get_data("data.old_time")),
+ constants.XAPI_RESULT_VIDEO_TIME_TO: convert_seconds_to_float(self.get_data("data.new_time")),
+ }
+ ),
)
-@XApiTransformersRegistry.register('speed_change_video')
+@XApiTransformersRegistry.register("speed_change_video")
class VideoSpeedChangedTransformer(BaseVideoTransformer):
"""
Transformer for the events generated when speed of video is changed.
"""
- additional_fields = BaseVideoTransformer.additional_fields + ('result', )
+
+ additional_fields = BaseVideoTransformer.additional_fields + ("result",)
def get_result(self):
"""
@@ -310,8 +301,10 @@ def get_result(self):
`Result`
"""
return Result(
- extensions=Extensions({
- constants.XAPI_RESULT_VIDEO_SPEED_FROM: ''.join([self.get_data('data.old_speed'), 'x']),
- constants.XAPI_RESULT_VIDEO_SPEED_TO: ''.join([self.get_data('data.new_speed'), 'x']),
- }),
+ extensions=Extensions(
+ {
+ constants.XAPI_RESULT_VIDEO_SPEED_FROM: "".join([self.get_data("data.old_speed"), "x"]),
+ constants.XAPI_RESULT_VIDEO_SPEED_TO: "".join([self.get_data("data.new_speed"), "x"]),
+ }
+ ),
)
diff --git a/event_routing_backends/processors/xapi/registry.py b/event_routing_backends/processors/xapi/registry.py
index 11032698..21b833c4 100644
--- a/event_routing_backends/processors/xapi/registry.py
+++ b/event_routing_backends/processors/xapi/registry.py
@@ -1,6 +1,7 @@
"""
Registry to keep track of xAPI event transformers
"""
+
from event_routing_backends.processors.transformer_utils.registry import TransformerRegistry
@@ -8,4 +9,5 @@ class XApiTransformersRegistry(TransformerRegistry):
"""
Registry to keep track of xAPI event transformers
"""
+
mapping = {}
diff --git a/event_routing_backends/processors/xapi/statements.py b/event_routing_backends/processors/xapi/statements.py
index 11951308..79586025 100644
--- a/event_routing_backends/processors/xapi/statements.py
+++ b/event_routing_backends/processors/xapi/statements.py
@@ -1,6 +1,7 @@
"""
xAPI statement classes
"""
+
from tincan import Activity
@@ -10,6 +11,7 @@ class GroupActivity(Activity):
For use with Activites that contain one or more child Activities, like Problems that contain multiple Questions.
"""
+
@Activity.object_type.setter
def object_type(self, _):
- self._object_type = 'GroupActivity'
+ self._object_type = "GroupActivity"
diff --git a/event_routing_backends/processors/xapi/tests/test_transformers.py b/event_routing_backends/processors/xapi/tests/test_transformers.py
index 29e260fe..7cc3f658 100644
--- a/event_routing_backends/processors/xapi/tests/test_transformers.py
+++ b/event_routing_backends/processors/xapi/tests/test_transformers.py
@@ -1,6 +1,7 @@
"""
Test the transformers for all of the currently supported events into xAPI format.
"""
+
import hashlib
import json
import os
@@ -23,6 +24,7 @@ class XApiTransformersFixturesTestMixin(TransformersFixturesTestMixin):
This mixin is split into its own class so it can be used by packages outside of ERB.
"""
+
registry = XApiTransformersRegistry
@property
@@ -30,12 +32,12 @@ def expected_events_fixture_path(self):
"""
Return the path to the expected transformed events fixture files.
"""
- return '{}/fixtures/expected'.format(os.path.dirname(os.path.abspath(__file__)))
+ return "{}/fixtures/expected".format(os.path.dirname(os.path.abspath(__file__)))
def assert_correct_transformer_version(self, transformed_event, transformer_version):
self.assertEqual(
transformed_event.context.extensions[constants.XAPI_TRANSFORMER_VERSION_KEY],
- transformer_version
+ transformer_version,
)
def compare_events(self, transformed_event, expected_event):
@@ -85,21 +87,25 @@ class TestXApiTransformers(XApiTransformersFixturesTestMixin, TransformersTestMi
Test xApi event transforms and settings.
"""
- @override_settings(XAPI_AGENT_IFI_TYPE='mbox')
+ @override_settings(XAPI_AGENT_IFI_TYPE="mbox")
def test_xapi_agent_ifi_settings_mbox(self):
- self.registry.register('test_event')(XApiTransformer)
- raw_event = self.get_raw_event('edx.course.enrollment.activated.json')
+ self.registry.register("test_event")(XApiTransformer)
+ raw_event = self.get_raw_event("edx.course.enrollment.activated.json")
transformed_event = self.registry.get_transformer(raw_event).transform()
action_json = transformed_event.actor.to_json()
- self.assertEqual(action_json, json.dumps({"objectType": "Agent", "mbox": "mailto:edx@example.com"}))
+ self.assertEqual(
+ action_json,
+ json.dumps({"objectType": "Agent", "mbox": "mailto:edx@example.com"}),
+ )
- @override_settings(XAPI_AGENT_IFI_TYPE='mbox_sha1sum')
+ @override_settings(XAPI_AGENT_IFI_TYPE="mbox_sha1sum")
def test_xapi_agent_ifi_settings_mbox_sha1sum(self):
- self.registry.register('test_event')(XApiTransformer)
- raw_event = self.get_raw_event('edx.course.enrollment.activated.json')
+ self.registry.register("test_event")(XApiTransformer)
+ raw_event = self.get_raw_event("edx.course.enrollment.activated.json")
transformed_event = self.registry.get_transformer(raw_event).transform()
action_json = transformed_event.actor.to_json()
- mbox_sha1sum = hashlib.sha1('edx@example.com'.encode('utf-8')).hexdigest()
+ mbox_sha1sum = hashlib.sha1("edx@example.com".encode("utf-8")).hexdigest()
self.assertEqual(
- action_json, json.dumps({"objectType": "Agent", "mbox_sha1sum": mbox_sha1sum})
+ action_json,
+ json.dumps({"objectType": "Agent", "mbox_sha1sum": mbox_sha1sum}),
)
diff --git a/event_routing_backends/processors/xapi/tests/test_xapi.py b/event_routing_backends/processors/xapi/tests/test_xapi.py
index d2c93549..34e828a3 100644
--- a/event_routing_backends/processors/xapi/tests/test_xapi.py
+++ b/event_routing_backends/processors/xapi/tests/test_xapi.py
@@ -1,6 +1,7 @@
"""
Test the xAPI processor.
"""
+
import uuid
from django.test import SimpleTestCase
@@ -17,41 +18,37 @@ class TestXApiProcessor(SimpleTestCase):
def setUp(self):
super().setUp()
- self.sample_event = {
- 'name': str(sentinel.name)
- }
+ self.sample_event = {"name": str(sentinel.name)}
self.processor = XApiProcessor()
@override_settings(XAPI_EVENTS_ENABLED=False)
def test_skip_event_when_disabled(self):
self.assertFalse(self.processor(self.sample_event))
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_send_method_with_no_transformer_implemented(self, mocked_logger):
self.assertFalse(self.processor([self.sample_event]))
mocked_logger.error.assert_called_once_with(
- 'Could not get transformer for %s event.',
- self.sample_event.get('name')
+ "Could not get transformer for %s event.", self.sample_event.get("name")
)
@patch(
- 'event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer',
- side_effect=ValueError('Generic Error')
+ "event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer",
+ side_effect=ValueError("Generic Error"),
)
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_send_method_with_unknown_exception(self, mocked_logger, _):
with self.assertRaises(ValueError):
self.processor([self.sample_event])
mocked_logger.exception.assert_called_once_with(
'There was an error while trying to transform event "sentinel.name" using XApiProcessor'
- ' processor. Error: Generic Error')
+ " processor. Error: Generic Error"
+ )
- @patch(
- 'event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer'
- )
- @patch('event_routing_backends.processors.xapi.transformer_processor.xapi_logger')
+ @patch("event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer")
+ @patch("event_routing_backends.processors.xapi.transformer_processor.xapi_logger")
def test_send_method_with_successfull_flow(self, mocked_logger, mocked_get_transformer):
transformed_event = Statement()
transformed_event.object = Activity(id=str(uuid.uuid4()))
@@ -63,10 +60,8 @@ def test_send_method_with_successfull_flow(self, mocked_logger, mocked_get_trans
self.assertIn(call.info(transformed_event.to_json()), mocked_logger.mock_calls)
- @patch(
- 'event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer'
- )
- @patch('event_routing_backends.processors.xapi.transformer_processor.xapi_logger')
+ @patch("event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer")
+ @patch("event_routing_backends.processors.xapi.transformer_processor.xapi_logger")
def test_send_method_with_event_list_successfull_flow(self, mocked_logger, mocked_get_transformer):
transformed_event = Statement()
@@ -78,10 +73,8 @@ def test_send_method_with_event_list_successfull_flow(self, mocked_logger, mocke
self.processor([self.sample_event])
self.assertIn(call.info(transformed_event.to_json()), mocked_logger.mock_calls)
- @patch(
- 'event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer'
- )
- @patch('event_routing_backends.processors.xapi.transformer_processor.xapi_logger')
+ @patch("event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer")
+ @patch("event_routing_backends.processors.xapi.transformer_processor.xapi_logger")
def test_send_method_with_invalid_object(self, mocked_logger, mocked_get_transformer):
transformed_event = Statement()
mocked_transformer = MagicMock()
@@ -92,10 +85,8 @@ def test_send_method_with_invalid_object(self, mocked_logger, mocked_get_transfo
self.assertNotIn(call(transformed_event.to_json()), mocked_logger.mock_calls)
@override_settings(XAPI_EVENT_LOGGING_ENABLED=False)
- @patch(
- 'event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer'
- )
- @patch('event_routing_backends.processors.xapi.transformer_processor.xapi_logger')
+ @patch("event_routing_backends.processors.xapi.transformer_processor.XApiTransformersRegistry.get_transformer")
+ @patch("event_routing_backends.processors.xapi.transformer_processor.xapi_logger")
def test_send_method_with_successfull_flow_no_logger(self, mocked_logger, mocked_get_transformer):
transformed_event = Statement()
transformed_event.object = Activity(id=str(uuid.uuid4()))
@@ -107,7 +98,7 @@ def test_send_method_with_successfull_flow_no_logger(self, mocked_logger, mocked
self.assertNotIn(call(transformed_event.to_json()), mocked_logger.mock_calls)
- @patch('event_routing_backends.processors.mixins.base_transformer_processor.logger')
+ @patch("event_routing_backends.processors.mixins.base_transformer_processor.logger")
def test_with_no_registry(self, mocked_logger):
backend = XApiProcessor()
backend.registry = None
diff --git a/event_routing_backends/processors/xapi/tests/test_xapi_event_transformers.py b/event_routing_backends/processors/xapi/tests/test_xapi_event_transformers.py
index 3ac03db3..3463ce1f 100644
--- a/event_routing_backends/processors/xapi/tests/test_xapi_event_transformers.py
+++ b/event_routing_backends/processors/xapi/tests/test_xapi_event_transformers.py
@@ -13,6 +13,7 @@ class TestXAPIEventTransformers(SimpleTestCase):
"""
These cases are covered by the fixtures, but coverage doesn't think so.
"""
+
def test_jsonencodedresult_list(self):
test_data = ["foo", "b'ar", 'test"ing', 2]
result = JSONEncodedResult()
diff --git a/event_routing_backends/processors/xapi/transformer.py b/event_routing_backends/processors/xapi/transformer.py
index d38b2375..aa259450 100644
--- a/event_routing_backends/processors/xapi/transformer.py
+++ b/event_routing_backends/processors/xapi/transformer.py
@@ -1,6 +1,7 @@
"""
xAPI Transformer Class
"""
+
import hashlib
from django.conf import settings
@@ -28,9 +29,10 @@ class XApiTransformer(BaseTransformerMixin):
"""
xAPI Transformer Class
"""
+
required_fields = (
- 'object',
- 'verb',
+ "object",
+ "verb",
)
def transform(self):
@@ -53,12 +55,14 @@ def base_transform(self, transformed_event):
Transform the fields that are common for all events.
"""
transformed_event = super().base_transform(transformed_event)
- transformed_event.update({
- 'id': self.get_event_id(),
- 'actor': self.get_actor(),
- 'context': self.get_context(),
- 'timestamp': self.get_timestamp(),
- })
+ transformed_event.update(
+ {
+ "id": self.get_event_id(),
+ "actor": self.get_actor(),
+ "context": self.get_context(),
+ "timestamp": self.get_timestamp(),
+ }
+ )
return transformed_event
def get_event_id(self):
@@ -76,7 +80,7 @@ def get_event_id(self):
# any change in generation of UUID.
actor = self.get_actor()
event_timestamp = self.get_timestamp()
- uuid_str = f'{actor.to_json()}-{event_timestamp}'
+ uuid_str = f"{actor.to_json()}-{event_timestamp}"
return get_uuid5(self.get_verb().to_json(), uuid_str)
@openedx_filter(filter_type="event_routing_backends.processors.xapi.transformer.xapi_transformer.get_actor")
@@ -88,18 +92,16 @@ def get_actor(self):
`Agent`
"""
- if settings.XAPI_AGENT_IFI_TYPE == 'mbox':
+ if settings.XAPI_AGENT_IFI_TYPE == "mbox":
email = get_user_email(self.extract_username_or_userid())
agent = Agent(mbox=email)
- elif settings.XAPI_AGENT_IFI_TYPE == 'mbox_sha1sum':
+ elif settings.XAPI_AGENT_IFI_TYPE == "mbox_sha1sum":
email = get_user_email(self.extract_username_or_userid())
- mbox_sha1sum = hashlib.sha1(email.encode('utf-8')).hexdigest()
+ mbox_sha1sum = hashlib.sha1(email.encode("utf-8")).hexdigest()
agent = Agent(mbox_sha1sum=mbox_sha1sum)
else:
- user_uuid = get_anonymous_user_id(self.extract_username_or_userid(), 'XAPI')
- agent = Agent(
- account={"homePage": settings.LMS_ROOT_URL, "name": user_uuid}
- )
+ user_uuid = get_anonymous_user_id(self.extract_username_or_userid(), "XAPI")
+ agent = Agent(account={"homePage": settings.LMS_ROOT_URL, "name": user_uuid})
return agent
@openedx_filter(filter_type="event_routing_backends.processors.xapi.transformer.xapi_transformer.get_verb")
@@ -117,7 +119,7 @@ def get_timestamp(self):
Returns:
str
"""
- return self.get_data('timestamp') or self.get_data('time')
+ return self.get_data("timestamp") or self.get_data("time")
def get_context_activities(self):
"""
@@ -126,17 +128,14 @@ def get_context_activities(self):
Returns:
`ContextActivities`
"""
- if self.get_data('context.course_id') is not None:
- course = get_course_from_id(self.get_data('context.course_id'))
+ if self.get_data("context.course_id") is not None:
+ course = get_course_from_id(self.get_data("context.course_id"))
course_name = LanguageMap({constants.EN_US: course["display_name"]})
parent_activities = [
Activity(
- id=self.get_object_iri('course', self.get_data('context.course_id')),
+ id=self.get_object_iri("course", self.get_data("context.course_id")),
object_type=constants.XAPI_ACTIVITY_COURSE,
- definition=ActivityDefinition(
- type=constants.XAPI_ACTIVITY_COURSE,
- name=course_name
- )
+ definition=ActivityDefinition(type=constants.XAPI_ACTIVITY_COURSE, name=course_name),
),
]
return ContextActivities(
@@ -153,15 +152,17 @@ def get_context(self):
"""
context = Context(
extensions=self.get_context_extensions(),
- contextActivities=self.get_context_activities()
+ contextActivities=self.get_context_activities(),
)
return context
def get_context_extensions(self):
- return Extensions({
+ return Extensions(
+ {
-            constants.XAPI_TRANSFORMER_VERSION_KEY: self.transformer_version,
-            constants.XAPI_CONTEXT_SESSION_ID: self.extract_sessionid()
-        })
+                constants.XAPI_TRANSFORMER_VERSION_KEY: self.transformer_version,
+                constants.XAPI_CONTEXT_SESSION_ID: self.extract_sessionid(),
+            }
+        )
class XApiVerbTransformerMixin:
@@ -175,6 +176,7 @@ class XApiVerbTransformerMixin:
This is helpful in base transformer class which are going to be
transforming multiple transformers.
"""
+
verb_map = None
def get_verb(self):
@@ -184,18 +186,15 @@ def get_verb(self):
Returns:
`Verb`
"""
- event_name = self.get_data('name', True)
+ event_name = self.get_data("name", True)
- event_source = self.get_data('event_source') or self.get_data('context.event_source')
- if event_source == 'browser' and event_name == 'problem_check':
- verb = self.verb_map['problem_check_browser']
+ event_source = self.get_data("event_source") or self.get_data("context.event_source")
+ if event_source == "browser" and event_name == "problem_check":
+ verb = self.verb_map["problem_check_browser"]
else:
verb = self.verb_map[event_name]
- return Verb(
- id=verb['id'],
- display=LanguageMap({constants.EN: verb['display']})
- )
+ return Verb(id=verb["id"], display=LanguageMap({constants.EN: verb["display"]}))
class OneToManyXApiTransformerMixin:
@@ -205,6 +204,7 @@ class OneToManyXApiTransformerMixin:
* 1 parent xAPI event, plus
* N "child" xAPI events, where N>=0
"""
+
@property
def child_transformer_class(self):
"""
@@ -257,7 +257,8 @@ def transform_children(self, parent):
child_id=child_id,
parent=parent,
event=self.event,
- ).transform() for child_id in child_ids
+ ).transform()
+ for child_id in child_ids
]
@@ -295,8 +296,8 @@ def get_event_id(self):
# any change in generation of UUID.
actor = self.get_actor()
event_timestamp = self.get_timestamp()
- name = f'{actor.to_json()}-{event_timestamp}'
- namespace_key = f'{self.get_verb().to_json()}-{self.child_id}'
+ name = f"{actor.to_json()}-{event_timestamp}"
+ namespace_key = f"{self.get_verb().to_json()}-{self.child_id}"
return get_uuid5(namespace_key, name)
def get_context(self):
diff --git a/event_routing_backends/processors/xapi/transformer_processor.py b/event_routing_backends/processors/xapi/transformer_processor.py
index d09488e9..5da0e5be 100644
--- a/event_routing_backends/processors/xapi/transformer_processor.py
+++ b/event_routing_backends/processors/xapi/transformer_processor.py
@@ -1,6 +1,7 @@
"""
xAPI processor for transforming and routing events.
"""
+
import json
from logging import getLogger
@@ -11,7 +12,7 @@
from event_routing_backends.processors.xapi.registry import XApiTransformersRegistry
logger = getLogger(__name__)
-xapi_logger = getLogger('xapi_tracking')
+xapi_logger = getLogger("xapi_tracking")
class XApiProcessor(BaseTransformerProcessorMixin):
diff --git a/event_routing_backends/settings/common.py b/event_routing_backends/settings/common.py
index dbea6c90..b98f55d0 100644
--- a/event_routing_backends/settings/common.py
+++ b/event_routing_backends/settings/common.py
@@ -62,7 +62,7 @@ def plugin_settings(settings):
# mbox_sha1sum: "f427d80dc332a166bf5f160ec15f009ce7e68c4c"
# }
# ```
- settings.XAPI_AGENT_IFI_TYPE = 'external_id'
+ settings.XAPI_AGENT_IFI_TYPE = "external_id"
# .. setting_name: EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS
# .. setting_default: [
@@ -75,9 +75,9 @@ def plugin_settings(settings):
# in case multiple attempts to rout them to relevant LRS are failed. Once persisted we can retry sending
# them once issues are resolved.
settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS = [
- 'edx.course.enrollment.activated',
- 'edx.course.enrollment.deactivated',
- 'edx.course.grade.passed.first_time'
+ "edx.course.enrollment.activated",
+ "edx.course.enrollment.deactivated",
+ "edx.course.grade.passed.first_time",
]
# .. setting_name: EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS
# .. setting_default: [
@@ -88,74 +88,74 @@ def plugin_settings(settings):
# ]
# .. setting_description: Contains the full list of events to be processed by the xAPI backend.
# If this setting has already been initialized, we append to the existing list.
- if not hasattr(settings, 'EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS'):
+ if not hasattr(settings, "EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS"):
settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS = []
settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS += [
- 'edx.course.enrollment.activated',
- 'edx.course.enrollment.deactivated',
- 'edx.course.enrollment.mode_changed',
- 'edx.grades.subsection.grade_calculated',
- 'edx.grades.course.grade_calculated',
- 'edx.special_exam.timed.attempt.created',
- 'edx.special_exam.timed.attempt.submitted',
- 'edx.special_exam.practice.attempt.created',
- 'edx.special_exam.practice.attempt.submitted',
- 'edx.special_exam.proctored.attempt.created',
- 'edx.special_exam.proctored.attempt.submitted',
- 'edx.completion.block_completion.changed',
- 'edx.forum.thread.created',
- 'edx.forum.thread.deleted',
- 'edx.forum.thread.edited',
- 'edx.forum.thread.viewed',
- 'edx.forum.thread.reported',
- 'edx.forum.thread.unreported',
- 'edx.forum.thread.voted',
- 'edx.forum.response.created',
- 'edx.forum.response.deleted',
- 'edx.forum.response.edited',
- 'edx.forum.response.reported',
- 'edx.forum.response.unreported',
- 'edx.forum.response.voted',
- 'edx.forum.comment.created',
- 'edx.forum.comment.deleted',
- 'edx.forum.comment.edited',
- 'edx.forum.comment.reported',
- 'edx.forum.comment.unreported',
- 'edx.ui.lms.link_clicked',
- 'edx.ui.lms.sequence.outline.selected',
- 'edx.ui.lms.outline.selected',
- 'edx.ui.lms.sequence.next_selected',
- 'edx.ui.lms.sequence.previous_selected',
- 'edx.ui.lms.sequence.tab_selected',
- 'showanswer',
- 'edx.problem.hint.demandhint_displayed',
- 'problem_check',
- 'load_video',
- 'edx.video.loaded',
- 'play_video',
- 'edx.video.played',
- 'complete_video',
- 'edx.video.completed',
- 'stop_video',
- 'edx.video.stopped',
- 'pause_video',
- 'edx.video.paused',
- 'seek_video',
- 'edx.video.position.changed',
- 'hide_transcript',
- 'edx.video.transcript.hidden',
- 'show_transcript',
- 'edx.video.transcript.shown',
- 'speed_change_video',
- 'video_hide_cc_menu',
- 'edx.video.closed_captions.shown',
- 'edx.video.closed_captions.hidden',
- 'edx.video.language_menu.hidden',
- 'video_show_cc_menu',
- 'edx.video.language_menu.shown',
- 'edx.course.grade.passed.first_time',
- 'edx.course.grade.now_passed',
- 'edx.course.grade.now_failed',
+ "edx.course.enrollment.activated",
+ "edx.course.enrollment.deactivated",
+ "edx.course.enrollment.mode_changed",
+ "edx.grades.subsection.grade_calculated",
+ "edx.grades.course.grade_calculated",
+ "edx.special_exam.timed.attempt.created",
+ "edx.special_exam.timed.attempt.submitted",
+ "edx.special_exam.practice.attempt.created",
+ "edx.special_exam.practice.attempt.submitted",
+ "edx.special_exam.proctored.attempt.created",
+ "edx.special_exam.proctored.attempt.submitted",
+ "edx.completion.block_completion.changed",
+ "edx.forum.thread.created",
+ "edx.forum.thread.deleted",
+ "edx.forum.thread.edited",
+ "edx.forum.thread.viewed",
+ "edx.forum.thread.reported",
+ "edx.forum.thread.unreported",
+ "edx.forum.thread.voted",
+ "edx.forum.response.created",
+ "edx.forum.response.deleted",
+ "edx.forum.response.edited",
+ "edx.forum.response.reported",
+ "edx.forum.response.unreported",
+ "edx.forum.response.voted",
+ "edx.forum.comment.created",
+ "edx.forum.comment.deleted",
+ "edx.forum.comment.edited",
+ "edx.forum.comment.reported",
+ "edx.forum.comment.unreported",
+ "edx.ui.lms.link_clicked",
+ "edx.ui.lms.sequence.outline.selected",
+ "edx.ui.lms.outline.selected",
+ "edx.ui.lms.sequence.next_selected",
+ "edx.ui.lms.sequence.previous_selected",
+ "edx.ui.lms.sequence.tab_selected",
+ "showanswer",
+ "edx.problem.hint.demandhint_displayed",
+ "problem_check",
+ "load_video",
+ "edx.video.loaded",
+ "play_video",
+ "edx.video.played",
+ "complete_video",
+ "edx.video.completed",
+ "stop_video",
+ "edx.video.stopped",
+ "pause_video",
+ "edx.video.paused",
+ "seek_video",
+ "edx.video.position.changed",
+ "hide_transcript",
+ "edx.video.transcript.hidden",
+ "show_transcript",
+ "edx.video.transcript.shown",
+ "speed_change_video",
+ "video_hide_cc_menu",
+ "edx.video.closed_captions.shown",
+ "edx.video.closed_captions.hidden",
+ "edx.video.language_menu.hidden",
+ "video_show_cc_menu",
+ "edx.video.language_menu.shown",
+ "edx.course.grade.passed.first_time",
+ "edx.course.grade.now_passed",
+ "edx.course.grade.now_failed",
]
# .. setting_name: EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS
# .. setting_default: [
@@ -166,49 +166,50 @@ def plugin_settings(settings):
# ]
# .. setting_description: Contains the full list of events to be processed by the Caliper backend.
# If this setting has already been initialized, we append to the existing list.
- if not hasattr(settings, 'EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS'):
+ if not hasattr(settings, "EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS"):
settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS = []
settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS += [
- 'edx.course.enrollment.activated',
- 'edx.course.enrollment.deactivated',
- 'edx.ui.lms.link_clicked',
- 'edx.ui.lms.sequence.outline.selected',
- 'edx.ui.lms.outline.selected',
- 'edx.ui.lms.sequence.next_selected',
- 'edx.ui.lms.sequence.previous_selected',
- 'edx.ui.lms.sequence.tab_selected',
- 'showanswer',
- 'edx.problem.hint.demandhint_displayed',
- 'problem_check',
- 'load_video',
- 'edx.video.loaded',
- 'play_video',
- 'edx.video.played',
- 'complete_video',
- 'edx.video.completed',
- 'stop_video',
- 'edx.video.stopped',
- 'pause_video',
- 'edx.video.paused',
- 'seek_video',
- 'edx.video.position.changed',
- 'edx.course.grade.passed.first_time',
- 'edx.course.grade.now_passed',
- 'edx.course.grade.now_failed'
+ "edx.course.enrollment.activated",
+ "edx.course.enrollment.deactivated",
+ "edx.ui.lms.link_clicked",
+ "edx.ui.lms.sequence.outline.selected",
+ "edx.ui.lms.outline.selected",
+ "edx.ui.lms.sequence.next_selected",
+ "edx.ui.lms.sequence.previous_selected",
+ "edx.ui.lms.sequence.tab_selected",
+ "showanswer",
+ "edx.problem.hint.demandhint_displayed",
+ "problem_check",
+ "load_video",
+ "edx.video.loaded",
+ "play_video",
+ "edx.video.played",
+ "complete_video",
+ "edx.video.completed",
+ "stop_video",
+ "edx.video.stopped",
+ "pause_video",
+ "edx.video.paused",
+ "seek_video",
+ "edx.video.position.changed",
+ "edx.course.grade.passed.first_time",
+ "edx.course.grade.now_passed",
+ "edx.course.grade.now_failed",
]
# Operators can configure the event bus allowed events via EVENT_BUS_TRACKING_LOGS and by default
# we are allowing the supported events by xAPI and Caliper so that operators don't need to configure
# the events manually.
settings.EVENT_BUS_TRACKING_LOGS = set(
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS +
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS + settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS
)
- if not hasattr(settings, 'EVENT_TRACKING_BACKENDS') or not settings.EVENT_TRACKING_BACKENDS:
+ if not hasattr(settings, "EVENT_TRACKING_BACKENDS") or not settings.EVENT_TRACKING_BACKENDS:
settings.EVENT_TRACKING_BACKENDS = {}
- settings.EVENT_TRACKING_BACKENDS.update(event_tracking_backends_config(
- settings,
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS,
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
- ))
+ settings.EVENT_TRACKING_BACKENDS.update(
+ event_tracking_backends_config(
+ settings,
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS,
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
+ )
+ )
diff --git a/event_routing_backends/settings/production.py b/event_routing_backends/settings/production.py
index c530e7e9..b21f664a 100644
--- a/event_routing_backends/settings/production.py
+++ b/event_routing_backends/settings/production.py
@@ -8,58 +8,43 @@ def plugin_settings(settings):
Override the default event_routing_backends app settings with production settings.
"""
settings.EVENT_ROUTING_BACKEND_MAX_RETRIES = settings.ENV_TOKENS.get(
- 'EVENT_ROUTING_BACKEND_MAX_RETRIES',
- settings.EVENT_ROUTING_BACKEND_MAX_RETRIES
+ "EVENT_ROUTING_BACKEND_MAX_RETRIES", settings.EVENT_ROUTING_BACKEND_MAX_RETRIES
)
settings.EVENT_ROUTING_BACKEND_COUNTDOWN = settings.ENV_TOKENS.get(
- 'EVENT_ROUTING_BACKEND_COUNTDOWN',
- settings.EVENT_ROUTING_BACKEND_COUNTDOWN
+ "EVENT_ROUTING_BACKEND_COUNTDOWN", settings.EVENT_ROUTING_BACKEND_COUNTDOWN
)
settings.EVENT_ROUTING_BACKEND_BATCH_SIZE = settings.ENV_TOKENS.get(
- 'EVENT_ROUTING_BACKEND_BATCH_SIZE',
- settings.EVENT_ROUTING_BACKEND_BATCH_SIZE
+ "EVENT_ROUTING_BACKEND_BATCH_SIZE", settings.EVENT_ROUTING_BACKEND_BATCH_SIZE
)
settings.EVENT_ROUTING_BACKEND_BATCHING_ENABLED = settings.ENV_TOKENS.get(
- 'EVENT_ROUTING_BACKEND_BATCHING_ENABLED',
- settings.EVENT_ROUTING_BACKEND_BATCHING_ENABLED
+ "EVENT_ROUTING_BACKEND_BATCHING_ENABLED",
+ settings.EVENT_ROUTING_BACKEND_BATCHING_ENABLED,
)
settings.EVENT_ROUTING_BACKEND_BATCH_INTERVAL = settings.ENV_TOKENS.get(
- 'EVENT_ROUTING_BACKEND_BATCH_INTERVAL',
- settings.EVENT_ROUTING_BACKEND_BATCH_INTERVAL
- )
- settings.CALIPER_EVENTS_ENABLED = settings.ENV_TOKENS.get(
- 'CALIPER_EVENTS_ENABLED',
- settings.CALIPER_EVENTS_ENABLED
+ "EVENT_ROUTING_BACKEND_BATCH_INTERVAL",
+ settings.EVENT_ROUTING_BACKEND_BATCH_INTERVAL,
)
+ settings.CALIPER_EVENTS_ENABLED = settings.ENV_TOKENS.get("CALIPER_EVENTS_ENABLED", settings.CALIPER_EVENTS_ENABLED)
settings.CALIPER_EVENT_LOGGING_ENABLED = settings.ENV_TOKENS.get(
- 'CALIPER_EVENT_LOGGING_ENABLED',
- settings.CALIPER_EVENT_LOGGING_ENABLED
- )
- settings.XAPI_EVENTS_ENABLED = settings.ENV_TOKENS.get(
- 'XAPI_EVENTS_ENABLED',
- settings.XAPI_EVENTS_ENABLED
+ "CALIPER_EVENT_LOGGING_ENABLED", settings.CALIPER_EVENT_LOGGING_ENABLED
)
+ settings.XAPI_EVENTS_ENABLED = settings.ENV_TOKENS.get("XAPI_EVENTS_ENABLED", settings.XAPI_EVENTS_ENABLED)
settings.XAPI_EVENT_LOGGING_ENABLED = settings.ENV_TOKENS.get(
- 'XAPI_EVENT_LOGGING_ENABLED',
- settings.XAPI_EVENT_LOGGING_ENABLED
+ "XAPI_EVENT_LOGGING_ENABLED", settings.XAPI_EVENT_LOGGING_ENABLED
)
settings.EVENT_TRACKING_BACKENDS = settings.ENV_TOKENS.get(
- 'EVENT_TRACKING_BACKENDS',
- settings.EVENT_TRACKING_BACKENDS
+ "EVENT_TRACKING_BACKENDS", settings.EVENT_TRACKING_BACKENDS
)
settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS = settings.ENV_TOKENS.get(
- 'EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS',
- settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS
+ "EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS",
+ settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS,
)
settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS = settings.ENV_TOKENS.get(
- 'EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS',
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS
+ "EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS",
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS,
)
settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS = settings.ENV_TOKENS.get(
- 'EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS',
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS
- )
- settings.XAPI_AGENT_IFI_TYPE = settings.ENV_TOKENS.get(
- 'XAPI_AGENT_IFI_TYPE',
- settings.XAPI_AGENT_IFI_TYPE
+ "EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS",
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
)
+ settings.XAPI_AGENT_IFI_TYPE = settings.ENV_TOKENS.get("XAPI_AGENT_IFI_TYPE", settings.XAPI_AGENT_IFI_TYPE)
diff --git a/event_routing_backends/tasks.py b/event_routing_backends/tasks.py
index 58c70298..a3f74cf3 100644
--- a/event_routing_backends/tasks.py
+++ b/event_routing_backends/tasks.py
@@ -1,6 +1,7 @@
"""
Celery tasks.
"""
+
from celery import shared_task
from celery.utils.log import get_task_logger
from celery_utils.persist_on_failure import LoggedPersistOnFailureTask
@@ -13,8 +14,8 @@
logger = get_task_logger(__name__)
ROUTER_STRATEGY_MAPPING = {
- 'AUTH_HEADERS': HttpClient,
- 'XAPI_LRS': LrsClient,
+ "AUTH_HEADERS": HttpClient,
+ "XAPI_LRS": LrsClient,
}
@@ -33,7 +34,9 @@ def dispatch_event_persistent(self, event_name, event, router_type, host_config)
send_event(self, event_name, event, router_type, host_config)
-@shared_task(bind=True,)
+@shared_task(
+ bind=True,
+)
def dispatch_event(self, event_name, event, router_type, host_config):
"""
Send event to configured client.
@@ -62,7 +65,7 @@ def send_event(task, event_name, event, router_type, host_config):
try:
client_class = ROUTER_STRATEGY_MAPPING[router_type]
except KeyError:
- logger.error('Unsupported routing strategy detected: {}'.format(router_type))
+ logger.error("Unsupported routing strategy detected: {}".format(router_type))
return
try:
@@ -70,26 +73,26 @@ def send_event(task, event_name, event, router_type, host_config):
client.send(event, event_name)
logger.debug(
'Successfully dispatched transformed version of edx event "{}" using client: {}'.format(
- event_name,
- client_class
+ event_name, client_class
)
)
except EventNotDispatched as exc:
logger.exception(
'Exception occurred while trying to dispatch edx event "{}" using client: {}'.format(
- event_name,
- client_class
+ event_name, client_class
),
- exc_info=True
+ exc_info=True,
)
# If this function is called synchronously, we want to raise the exception
# to inform about errors. If it's called asynchronously, we want to retry
# the celery task till it succeeds or reaches max retries.
if not task:
raise exc
- raise task.retry(exc=exc, countdown=getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 30),
- max_retries=getattr(settings, ''
- 'EVENT_ROUTING_BACKEND_MAX_RETRIES', 3))
+ raise task.retry(
+ exc=exc,
+ countdown=getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 30),
+ max_retries=getattr(settings, "EVENT_ROUTING_BACKEND_MAX_RETRIES", 3),
+ )
@shared_task(bind=True)
@@ -119,30 +122,31 @@ def bulk_send_events(task, events, router_type, host_config):
try:
client_class = ROUTER_STRATEGY_MAPPING[router_type]
except KeyError:
- logger.error('Unsupported routing strategy detected: {}'.format(router_type))
+ logger.error("Unsupported routing strategy detected: {}".format(router_type))
return
try:
client = client_class(**host_config)
client.bulk_send(events)
logger.debug(
- 'Successfully bulk dispatched transformed versions of {} events using client: {}'.format(
- len(events),
- client_class
+ "Successfully bulk dispatched transformed versions of {} events using client: {}".format(
+ len(events), client_class
)
)
except EventNotDispatched as exc:
logger.exception(
- 'Exception occurred while trying to bulk dispatch {} events using client: {}'.format(
- len(events),
- client_class
+ "Exception occurred while trying to bulk dispatch {} events using client: {}".format(
+ len(events), client_class
),
- exc_info=True
+ exc_info=True,
)
# If this function is called synchronously, we want to raise the exception
# to inform about errors. If it's called asynchronously, we want to retry
# the celery task till it succeeds or reaches max retries.
if not task:
raise exc
- raise task.retry(exc=exc, countdown=getattr(settings, 'EVENT_ROUTING_BACKEND_COUNTDOWN', 30),
- max_retries=getattr(settings, 'EVENT_ROUTING_BACKEND_MAX_RETRIES', 3))
+ raise task.retry(
+ exc=exc,
+ countdown=getattr(settings, "EVENT_ROUTING_BACKEND_COUNTDOWN", 30),
+ max_retries=getattr(settings, "EVENT_ROUTING_BACKEND_MAX_RETRIES", 3),
+ )
diff --git a/event_routing_backends/tests/factories.py b/event_routing_backends/tests/factories.py
index 6fd974db..ad08513a 100644
--- a/event_routing_backends/tests/factories.py
+++ b/event_routing_backends/tests/factories.py
@@ -1,6 +1,7 @@
"""
Factories needed for unit tests in the app
"""
+
from django.contrib.auth import get_user_model
from factory.django import DjangoModelFactory
diff --git a/event_routing_backends/tests/test_helpers.py b/event_routing_backends/tests/test_helpers.py
index 9fdac254..87f2e2be 100644
--- a/event_routing_backends/tests/test_helpers.py
+++ b/event_routing_backends/tests/test_helpers.py
@@ -1,6 +1,7 @@
"""
Test the helper methods.
"""
+
from unittest.mock import patch
from ddt import data, ddt
@@ -25,65 +26,61 @@ class TestHelpers(TestCase):
def setUp(self):
super().setUp()
- self.edx_user = UserFactory.create(username='edx', email='edx@example.com')
- UserFactory.create(username='10228945687', email='edx@example.com')
+ self.edx_user = UserFactory.create(username="edx", email="edx@example.com")
+ UserFactory.create(username="10228945687", email="edx@example.com")
def test_get_block_id_from_event_referrer_with_error(self):
- sample_event = {
- 'context': {
- 'referer': None
- }
- }
- self.assertEqual(get_block_id_from_event_referrer(sample_event['context']['referer']), None)
+ sample_event = {"context": {"referer": None}}
+ self.assertEqual(get_block_id_from_event_referrer(sample_event["context"]["referer"]), None)
def test_get_user_email(self):
- with patch('event_routing_backends.helpers.get_potentially_retired_user_by_username') as mock_pr_user:
+ with patch("event_routing_backends.helpers.get_potentially_retired_user_by_username") as mock_pr_user:
mock_pr_user.return_value = None
- email = get_user_email('unknown')
- self.assertEqual(email, 'unknown@example.com')
- with patch('event_routing_backends.helpers.get_potentially_retired_user_by_username') as mock_pr_user:
- mock_pr_user.side_effect = Exception('User not found')
- email = get_user_email('unknown')
- self.assertEqual(email, 'unknown@example.com')
- email = get_user_email('edx')
- self.assertEqual(email, 'edx@example.com')
-
- @patch('event_routing_backends.helpers.ExternalId')
+ email = get_user_email("unknown")
+ self.assertEqual(email, "unknown@example.com")
+ with patch("event_routing_backends.helpers.get_potentially_retired_user_by_username") as mock_pr_user:
+ mock_pr_user.side_effect = Exception("User not found")
+ email = get_user_email("unknown")
+ self.assertEqual(email, "unknown@example.com")
+ email = get_user_email("edx")
+ self.assertEqual(email, "edx@example.com")
+
+ @patch("event_routing_backends.helpers.ExternalId")
def test_get_anonymous_user_id_with_error(self, mocked_external_id):
mocked_external_id.add_new_user_id.return_value = (None, False)
# Test that a failure to add an external id raises an error
with self.assertRaises(ValueError):
- get_anonymous_user_id('edx2', 'XAPI')
+ get_anonymous_user_id("edx2", "XAPI")
# Test that an unknown user raises this error
with self.assertRaises(ValueError):
- get_anonymous_user_id('12345678', 'XAPI')
+ get_anonymous_user_id("12345678", "XAPI")
def test_get_uuid5(self):
- actor = '''{
+ actor = """{
"objectType": "Agent",
"mbox": "mailto:edx@example.com"
- }'''
- verb = '''{
+ }"""
+ verb = """{
"id": "http://id.tincanapi.com/verb/unregistered",
"display": {
"en": "unregistered"
- }'''
- timestamp = '2023-05-09T06:36:11.256Z'
- name = f'{actor}-{timestamp}'
+ }"""
+ timestamp = "2023-05-09T06:36:11.256Z"
+ name = f"{actor}-{timestamp}"
uuid_1 = get_uuid5(verb, name)
uuid_2 = get_uuid5(verb, name)
self.assertEqual(uuid_1, uuid_2)
- another_actor = '''{
+ another_actor = """{
"objectType": "Agent",
"mbox": "mailto:test@example.com"
- }'''
- name = f'{another_actor}-{timestamp}'
+ }"""
+ name = f"{another_actor}-{timestamp}"
uuid_3 = get_uuid5(verb, name)
self.assertNotEqual(uuid_1, uuid_3)
- @patch('event_routing_backends.helpers.get_course_overviews')
+ @patch("event_routing_backends.helpers.get_course_overviews")
def test_get_course_from_id_unknown_course(self, mock_get_course_overviews):
mock_get_course_overviews.return_value = []
with self.assertRaises(ValueError):
@@ -101,7 +98,7 @@ def test_get_user_by_username(self, username):
self.assertEqual(username, user.username)
def test_get_user_by_id(self):
- """ Test that the method get_user returns the right user based on the user id.
+ """Test that the method get_user returns the right user based on the user id.
Expected behavior:
- Returned user is the edx_user
@@ -118,9 +115,9 @@ def test_get_user_priority(self):
Expected behavior:
- Returned user corresponds to the id.
"""
- right_user = UserFactory.create(username='testing', email='testing@example.com')
+ right_user = UserFactory.create(username="testing", email="testing@example.com")
# Create user with the previous user id as username.
- UserFactory.create(username=right_user.id, email='wrong-testing@example.com')
+ UserFactory.create(username=right_user.id, email="wrong-testing@example.com")
user = get_user(str(right_user.id))
diff --git a/event_routing_backends/tests/test_mixin.py b/event_routing_backends/tests/test_mixin.py
index b7624b59..411f7d2c 100644
--- a/event_routing_backends/tests/test_mixin.py
+++ b/event_routing_backends/tests/test_mixin.py
@@ -1,6 +1,7 @@
"""
Mixin for testing transformers for all of the currently supported events
"""
+
from event_routing_backends.tests.factories import RouterConfigurationFactory
@@ -9,7 +10,7 @@ class RouterTestMixin:
Test `Router` Mixin
"""
- def create_router_configuration(self, config_fixture, backend_name='first'):
+ def create_router_configuration(self, config_fixture, backend_name="first"):
"""
Return RouterConfigurationFactory object for given configurations and backend name.
@@ -24,6 +25,6 @@ def create_router_configuration(self, config_fixture, backend_name='first'):
return RouterConfigurationFactory(
configurations=config_fixture,
enabled=True,
- route_url='http://test2.com',
- backend_name=backend_name
+ route_url="http://test2.com",
+ backend_name=backend_name,
)
diff --git a/event_routing_backends/tests/test_models.py b/event_routing_backends/tests/test_models.py
index a3d8e5c1..5757fb6c 100644
--- a/event_routing_backends/tests/test_models.py
+++ b/event_routing_backends/tests/test_models.py
@@ -1,6 +1,7 @@
"""
Test the django models
"""
+
import ddt
from django.test import TestCase
from edx_django_utils.cache.utils import TieredCache
@@ -10,31 +11,24 @@
from event_routing_backends.tests.test_mixin import RouterTestMixin
ROUTER_CONFIG_FIXTURE = [
- {
- 'match_params': {
- 'context.org_id': 'abc',
- 'name': None
- },
- 'host_configurations': {
- 'url': 'http://test1.com',
- 'headers': {
- 'authorization': 'Token test'
- }
- }
- },
- {
- 'match_params': {
- 'context.org_id': 'test',
- 'name': ['problem_check', 'showanswer', 'stop_video']
- },
- 'host_configurations': {
- 'url': 'http://test1.com',
- 'headers': {
- 'authorization': 'Token test'
- }
- }
- }
- ]
+ {
+ "match_params": {"context.org_id": "abc", "name": None},
+ "host_configurations": {
+ "url": "http://test1.com",
+ "headers": {"authorization": "Token test"},
+ },
+ },
+ {
+ "match_params": {
+ "context.org_id": "test",
+ "name": ["problem_check", "showanswer", "stop_video"],
+ },
+ "host_configurations": {
+ "url": "http://test1.com",
+ "headers": {"authorization": "Token test"},
+ },
+ },
+]
@ddt.ddt
@@ -48,59 +42,62 @@ def test_str_method(self):
def test_enabled_router_is_returned(self):
first_router = RouterConfigurationFactory(
- configurations='{}',
+ configurations="{}",
enabled=True,
- route_url='http://test2.com',
- backend_name='first'
+ route_url="http://test2.com",
+ backend_name="first",
)
second_router = RouterConfigurationFactory(
- configurations='{}',
+ configurations="{}",
enabled=False,
- route_url='http://test3.com',
- backend_name='second'
+ route_url="http://test3.com",
+ backend_name="second",
)
- self.assertEqual(RouterConfiguration.get_enabled_routers('first')[0], first_router)
- self.assertEqual(RouterConfiguration.get_enabled_routers('second'), None)
+ self.assertEqual(RouterConfiguration.get_enabled_routers("first")[0], first_router)
+ self.assertEqual(RouterConfiguration.get_enabled_routers("second"), None)
second_router.enabled = True
second_router.save()
TieredCache.dangerous_clear_all_tiers()
- self.assertEqual(RouterConfiguration.get_enabled_routers('second')[0], second_router)
+ self.assertEqual(RouterConfiguration.get_enabled_routers("second")[0], second_router)
@ddt.data(
- ({'context.org_id': 'test'}, True),
- ({'non_existing.id.value': 'test'}, False),
- ({'context.org_id': 'abc', 'name': None}, False),
- ({'context.org_id': 'test', 'name': ['problem_check', 'showanswer', 'stop_video']}, True),
- ({'context.org_id': 'test', 'name': [None]}, False),
- ({'context.org_id': 'abc', 'name': 'problem_check'}, False),
- ({'context.org_id': 'test', 'name': 'problem_check'}, True),
- ({"course_id": r"^.*course-v.:edX\+.*\+2021.*$", "name": "problem_check"}, True),
- ({'context.org_id': 'test', "name": ["^problem.*", "video"]}, True),
+ ({"context.org_id": "test"}, True),
+ ({"non_existing.id.value": "test"}, False),
+ ({"context.org_id": "abc", "name": None}, False),
+ (
+ {
+ "context.org_id": "test",
+ "name": ["problem_check", "showanswer", "stop_video"],
+ },
+ True,
+ ),
+ ({"context.org_id": "test", "name": [None]}, False),
+ ({"context.org_id": "abc", "name": "problem_check"}, False),
+ ({"context.org_id": "test", "name": "problem_check"}, True),
+ (
+ {"course_id": r"^.*course-v.:edX\+.*\+2021.*$", "name": "problem_check"},
+ True,
+ ),
+ ({"context.org_id": "test", "name": ["^problem.*", "video"]}, True),
)
@ddt.unpack
def test_allowed_hosts(self, match_params, found):
config_fixture = {
- 'match_params': match_params,
- 'host_configurations': {
- 'url': 'http://test1.com',
- 'headers': {
- 'authorization': 'Token test'
- }
- }
- }
+ "match_params": match_params,
+ "host_configurations": {
+ "url": "http://test1.com",
+ "headers": {"authorization": "Token test"},
+ },
+ }
original_event = {
- 'course_id': 'course-v1:edX+E2E+2021+course',
- 'name': 'problem_check',
- 'context': {
- 'org_id': 'test'
- },
- 'data': {
- 'id': 'test_id'
- }
+ "course_id": "course-v1:edX+E2E+2021+course",
+ "name": "problem_check",
+ "context": {"org_id": "test"},
+ "data": {"id": "test_id"},
}
- router = self.create_router_configuration(config_fixture, 'first')
+ router = self.create_router_configuration(config_fixture, "first")
host = router.get_allowed_host(original_event)
if found:
@@ -110,40 +107,42 @@ def test_allowed_hosts(self, match_params, found):
def test_model_cache(self):
test_cache_router = RouterConfigurationFactory(
- configurations='{}',
+ configurations="{}",
enabled=True,
- route_url='http://test2.com',
- backend_name='test_cache'
+ route_url="http://test2.com",
+ backend_name="test_cache",
)
- self.assertEqual(RouterConfiguration.get_enabled_routers('test_cache')[0], test_cache_router)
+ self.assertEqual(RouterConfiguration.get_enabled_routers("test_cache")[0], test_cache_router)
- test_cache_router.route_url = 'http://test3.com'
+ test_cache_router.route_url = "http://test3.com"
test_cache_router.save()
- self.assertNotEqual(RouterConfiguration.get_enabled_routers('test_cache')[0], test_cache_router)
+ self.assertNotEqual(RouterConfiguration.get_enabled_routers("test_cache")[0], test_cache_router)
def test_multiple_routers_of_backend(self):
- backend_name = 'multiple_routers_test'
+ backend_name = "multiple_routers_test"
test_cache_router = RouterConfigurationFactory(
- configurations='{}',
+ configurations="{}",
enabled=True,
- route_url='http://test2.com',
- backend_name=backend_name
+ route_url="http://test2.com",
+ backend_name=backend_name,
)
test_cache_router1 = RouterConfigurationFactory(
- configurations='{}',
+ configurations="{}",
enabled=True,
- route_url='http://test1.com',
- backend_name=backend_name
+ route_url="http://test1.com",
+ backend_name=backend_name,
)
- self.assertEqual(list(RouterConfiguration.get_enabled_routers(backend_name)),
- [test_cache_router1, test_cache_router])
+ self.assertEqual(
+ list(RouterConfiguration.get_enabled_routers(backend_name)),
+ [test_cache_router1, test_cache_router],
+ )
def test_empty_backend(self):
- self.assertEqual(RouterConfiguration.get_enabled_routers(''), None)
+ self.assertEqual(RouterConfiguration.get_enabled_routers(""), None)
def test_empty_configurations(self):
- router = self.create_router_configuration(None, 'first')
+ router = self.create_router_configuration(None, "first")
host = router.get_allowed_host({})
- self.assertEqual(host, {'host_configurations': {}})
+ self.assertEqual(host, {"host_configurations": {}})
diff --git a/event_routing_backends/tests/test_settings.py b/event_routing_backends/tests/test_settings.py
index f6957738..3cdd2622 100644
--- a/event_routing_backends/tests/test_settings.py
+++ b/event_routing_backends/tests/test_settings.py
@@ -21,29 +21,37 @@ def test_common_settings(self):
"""
common_settings.plugin_settings(settings)
- self.assertIn('event_transformer', settings.EVENT_TRACKING_BACKENDS)
- self.assertIn('OPTIONS', settings.EVENT_TRACKING_BACKENDS["event_transformer"])
+ self.assertIn("event_transformer", settings.EVENT_TRACKING_BACKENDS)
+ self.assertIn("OPTIONS", settings.EVENT_TRACKING_BACKENDS["event_transformer"])
transformer_options = settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]
self.assertEqual(
set(
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS +
- settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
+ settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS
+ + settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
),
- transformer_options["processors"][0]["OPTIONS"]["whitelist"]
+ transformer_options["processors"][0]["OPTIONS"]["whitelist"],
)
self.assertIn("xapi", transformer_options["backends"])
self.assertEqual(
settings.EVENT_TRACKING_BACKENDS_ALLOWED_XAPI_EVENTS,
- transformer_options["backends"]["xapi"]["OPTIONS"]["processors"][0]["OPTIONS"]["whitelist"])
+ transformer_options["backends"]["xapi"]["OPTIONS"]["processors"][0]["OPTIONS"]["whitelist"],
+ )
- self.assertIn("caliper", settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"])
+ self.assertIn(
+ "caliper",
+ settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"],
+ )
self.assertEqual(
settings.EVENT_TRACKING_BACKENDS_ALLOWED_CALIPER_EVENTS,
- transformer_options["backends"]["caliper"]["OPTIONS"]["processors"][0]["OPTIONS"]["whitelist"])
+ transformer_options["backends"]["caliper"]["OPTIONS"]["processors"][0]["OPTIONS"]["whitelist"],
+ )
- self.assertIn('edx.course.enrollment.activated', settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS)
+ self.assertIn(
+ "edx.course.enrollment.activated",
+ settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS,
+ )
self.assertFalse(settings.CALIPER_EVENTS_ENABLED)
self.assertFalse(settings.CALIPER_EVENT_LOGGING_ENABLED)
self.assertTrue(settings.XAPI_EVENTS_ENABLED)
@@ -54,10 +62,19 @@ def test_devstack_settings(self):
Test devstack settings
"""
devstack_settings.plugin_settings(settings)
- self.assertIn('event_transformer', settings.EVENT_TRACKING_BACKENDS)
- self.assertIn('xapi', settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"])
- self.assertIn('caliper', settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"])
- self.assertIn('edx.course.enrollment.deactivated', settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS)
+ self.assertIn("event_transformer", settings.EVENT_TRACKING_BACKENDS)
+ self.assertIn(
+ "xapi",
+ settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"],
+ )
+ self.assertIn(
+ "caliper",
+ settings.EVENT_TRACKING_BACKENDS["event_transformer"]["OPTIONS"]["backends"],
+ )
+ self.assertIn(
+ "edx.course.enrollment.deactivated",
+ settings.EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS,
+ )
self.assertFalse(settings.CALIPER_EVENTS_ENABLED)
self.assertFalse(settings.CALIPER_EVENT_LOGGING_ENABLED)
self.assertTrue(settings.XAPI_EVENTS_ENABLED)
@@ -68,12 +85,12 @@ def test_production_settings(self):
Test production settings
"""
settings.ENV_TOKENS = {
- 'EVENT_TRACKING_BACKENDS': None,
- 'CALIPER_EVENTS_ENABLED': False,
- 'CALIPER_EVENT_LOGGING_ENABLED': True,
- 'XAPI_EVENTS_ENABLED': False,
- 'XAPI_EVENT_LOGGING_ENABLED': True,
- 'EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS': [],
+ "EVENT_TRACKING_BACKENDS": None,
+ "CALIPER_EVENTS_ENABLED": False,
+ "CALIPER_EVENT_LOGGING_ENABLED": True,
+ "XAPI_EVENTS_ENABLED": False,
+ "XAPI_EVENT_LOGGING_ENABLED": True,
+ "EVENT_TRACKING_BACKENDS_BUSINESS_CRITICAL_EVENTS": [],
}
production_setttings.plugin_settings(settings)
self.assertIsNone(settings.EVENT_TRACKING_BACKENDS)
diff --git a/event_routing_backends/utils/fields.py b/event_routing_backends/utils/fields.py
index 6f30ef87..d9f1bb28 100644
--- a/event_routing_backends/utils/fields.py
+++ b/event_routing_backends/utils/fields.py
@@ -1,9 +1,10 @@
"""
Custom django model fields.
"""
+
from fernet_fields import EncryptedField
from jsonfield.fields import JSONField
class EncryptedJSONField(EncryptedField, JSONField):
- description = 'Field to store encrypted JSON data.'
+ description = "Field to store encrypted JSON data."
diff --git a/event_routing_backends/utils/http_client.py b/event_routing_backends/utils/http_client.py
index 337fdaf2..4a054703 100644
--- a/event_routing_backends/utils/http_client.py
+++ b/event_routing_backends/utils/http_client.py
@@ -1,6 +1,7 @@
"""
A generic HTTP Client.
"""
+
from logging import getLogger
import requests
@@ -18,13 +19,13 @@ class HttpClient:
def __init__( # pylint: disable=too-many-positional-arguments
self,
- url='',
- auth_scheme='',
- auth_key='',
+ url="",
+ auth_scheme="",
+ auth_key="",
headers=None,
username=None,
password=None,
- **options
+ **options,
):
"""
Initialize the client with provided configurations.
@@ -53,9 +54,7 @@ def get_auth_header(self):
dict
"""
if self.AUTH_SCHEME == RouterConfiguration.AUTH_BEARER:
- return {
- 'Authorization': f'{self.AUTH_SCHEME} {self.AUTH_KEY}'
- }
+ return {"Authorization": f"{self.AUTH_SCHEME} {self.AUTH_KEY}"}
return {}
def bulk_send(self, events):
@@ -72,26 +71,30 @@ def bulk_send(self, events):
headers.update(self.get_auth_header())
options = self.options.copy()
- options.update({
- 'url': self.URL,
- 'json': events,
- 'headers': headers,
- })
+ options.update(
+ {
+ "url": self.URL,
+ "json": events,
+ "headers": headers,
+ }
+ )
if self.AUTH_SCHEME == RouterConfiguration.AUTH_BASIC:
- options.update({'auth': (self.username, self.password)})
- logger.debug('Sending caliper version of {} edx events to {}'.format(len(events), self.URL))
- response = requests.post(**options) # pylint: disable=missing-timeout
+ options.update({"auth": (self.username, self.password)})
+ logger.debug("Sending caliper version of {} edx events to {}".format(len(events), self.URL))
+ response = requests.post(**options) # pylint: disable=missing-timeout
if not 200 <= response.status_code < 300:
logger.warning(
- '{} request failed for sending Caliper version of {} edx events to {}.Response code: {}. '
- 'Response: '
- '{}'.format(
+ "{} request failed for sending Caliper version of {} edx events to {}.Response code: {}. "
+ "Response: "
+ "{}".format(
response.request.method,
- len(events), self.URL,
+ len(events),
+ self.URL,
response.status_code,
- response.text
- ))
+ response.text,
+ )
+ )
raise EventNotDispatched
def send(self, event, event_name):
@@ -109,23 +112,27 @@ def send(self, event, event_name):
headers.update(self.get_auth_header())
options = self.options.copy()
- options.update({
- 'url': self.URL,
- 'json': event,
- 'headers': headers,
- })
+ options.update(
+ {
+ "url": self.URL,
+ "json": event,
+ "headers": headers,
+ }
+ )
if self.AUTH_SCHEME == RouterConfiguration.AUTH_BASIC:
- options.update({'auth': (self.username, self.password)})
+ options.update({"auth": (self.username, self.password)})
logger.debug('Sending caliper version of edx event "{}" to {}'.format(event_name, self.URL))
- response = requests.post(**options) # pylint: disable=missing-timeout
+ response = requests.post(**options) # pylint: disable=missing-timeout
if not 200 <= response.status_code < 300:
logger.warning(
'{} request failed for sending Caliper version of edx event "{}" to {}.Response code: {}. Response: '
- '{}'.format(
+ "{}".format(
response.request.method,
- event_name, self.URL,
+ event_name,
+ self.URL,
response.status_code,
- response.text
- ))
+ response.text,
+ )
+ )
raise EventNotDispatched
diff --git a/event_routing_backends/utils/settings.py b/event_routing_backends/utils/settings.py
index 6161c6ca..c4f10ea3 100644
--- a/event_routing_backends/utils/settings.py
+++ b/event_routing_backends/utils/settings.py
@@ -14,66 +14,56 @@ def event_tracking_backends_config(settings, allowed_xapi_events: List[str], all
all_allowed_events = set(allowed_xapi_events + allowed_caliper_events)
return {
- 'event_transformer': {
- 'ENGINE': 'eventtracking.backends.async_routing.AsyncRoutingBackend',
- 'OPTIONS': {
- 'backend_name': 'event_transformer',
- 'processors': [
+ "event_transformer": {
+ "ENGINE": "eventtracking.backends.async_routing.AsyncRoutingBackend",
+ "OPTIONS": {
+ "backend_name": "event_transformer",
+ "processors": [
{
- 'ENGINE': 'eventtracking.processors.whitelist.NameWhitelistProcessor',
- 'OPTIONS': {
- 'whitelist': all_allowed_events
- }
+ "ENGINE": "eventtracking.processors.whitelist.NameWhitelistProcessor",
+ "OPTIONS": {"whitelist": all_allowed_events},
},
],
- 'backends': {
- 'xapi': {
- 'ENGINE': 'event_routing_backends.backends.async_events_router.AsyncEventsRouter',
- 'OPTIONS': {
- 'processors': [
+ "backends": {
+ "xapi": {
+ "ENGINE": "event_routing_backends.backends.async_events_router.AsyncEventsRouter",
+ "OPTIONS": {
+ "processors": [
{
- 'ENGINE': 'eventtracking.processors.whitelist.NameWhitelistProcessor',
- 'OPTIONS': {
- 'whitelist': allowed_xapi_events
- }
+ "ENGINE": "eventtracking.processors.whitelist.NameWhitelistProcessor",
+ "OPTIONS": {"whitelist": allowed_xapi_events},
},
{
- 'ENGINE':
- 'event_routing_backends.processors.xapi.transformer_processor.XApiProcessor',
- 'OPTIONS': {}
- }
+ "ENGINE": "event_routing_backends.processors."
+ "xapi.transformer_processor.XApiProcessor",
+ "OPTIONS": {},
+ },
],
- 'backend_name': 'xapi',
- }
+ "backend_name": "xapi",
+ },
},
"caliper": {
- 'ENGINE': 'event_routing_backends.backends.async_events_router.AsyncEventsRouter',
+ "ENGINE": "event_routing_backends.backends.async_events_router.AsyncEventsRouter",
"OPTIONS": {
"processors": [
{
"ENGINE": "eventtracking.processors.whitelist.NameWhitelistProcessor",
- "OPTIONS": {
- "whitelist": allowed_caliper_events
- }
+ "OPTIONS": {"whitelist": allowed_caliper_events},
},
{
- "ENGINE":
- "event_routing_backends.processors."
- "caliper.transformer_processor.CaliperProcessor",
- "OPTIONS": {}
+ "ENGINE": "event_routing_backends.processors."
+ "caliper.transformer_processor.CaliperProcessor",
+ "OPTIONS": {},
},
{
- "ENGINE":
- "event_routing_backends.processors."
- "caliper.envelope_processor.CaliperEnvelopeProcessor",
- "OPTIONS": {
- "sensor_id": settings.LMS_ROOT_URL
- }
- }
+ "ENGINE": "event_routing_backends.processors."
+ "caliper.envelope_processor.CaliperEnvelopeProcessor",
+ "OPTIONS": {"sensor_id": settings.LMS_ROOT_URL},
+ },
],
- "backend_name": "caliper"
- }
- }
+ "backend_name": "caliper",
+ },
+ },
},
},
}
diff --git a/event_routing_backends/utils/xapi_lrs_client.py b/event_routing_backends/utils/xapi_lrs_client.py
index 561ad596..c00e9004 100644
--- a/event_routing_backends/utils/xapi_lrs_client.py
+++ b/event_routing_backends/utils/xapi_lrs_client.py
@@ -1,6 +1,7 @@
"""
An LRS client for xAPI stores.
"""
+
from json.decoder import JSONDecodeError
from logging import getLogger
@@ -24,7 +25,7 @@ def __init__( # pylint: disable=too-many-positional-arguments
auth_scheme=None,
auth_key=None,
username=None,
- password=None
+ password=None,
):
"""
Initialize the client with provided configurations.
@@ -48,13 +49,13 @@ def __init__( # pylint: disable=too-many-positional-arguments
version=self.VERSION,
endpoint=self.URL,
username=username,
- password=password
+ password=password,
)
else:
self.lrs_client = RemoteLRS(
version=self.VERSION,
endpoint=self.URL,
- auth=self.get_auth_header_value()
+ auth=self.get_auth_header_value(),
)
def get_auth_header_value(self):
@@ -65,7 +66,7 @@ def get_auth_header_value(self):
str
"""
if self.AUTH_SCHEME and self.AUTH_SCHEME == RouterConfiguration.AUTH_BEARER and self.AUTH_KEY:
- return f'{self.AUTH_SCHEME} {self.AUTH_KEY}'
+ return f"{self.AUTH_SCHEME} {self.AUTH_KEY}"
return None
@@ -79,7 +80,7 @@ def bulk_send(self, statement_data):
Returns:
requests.Response object
"""
- logger.debug('Sending {} xAPI statements to {}'.format(len(statement_data), self.URL))
+ logger.debug("Sending {} xAPI statements to {}".format(len(statement_data), self.URL))
response = None
try:
@@ -97,9 +98,15 @@ def bulk_send(self, statement_data):
logger.warning(f"Duplicate event id found in: {response.request.content}")
else:
logger.warning(f"Failed request: {response.request.content}")
- logger.warning('{} request failed for sending xAPI statement of edx events to {}. '
- 'Response code: {}. Response: {}'.format(response.request.method, self.URL,
- response.response.code, response.data))
+ logger.warning(
+ "{} request failed for sending xAPI statement of edx events to {}. "
+ "Response code: {}. Response: {}".format(
+ response.request.method,
+ self.URL,
+ response.response.code,
+ response.data,
+ )
+ )
raise EventNotDispatched
def send(self, statement_data, event_name):
@@ -121,9 +128,16 @@ def send(self, statement_data, event_name):
# when the event id already exists, causing many retries that will never
# succeed, so we can eat this here.
if response.response.status == 409:
- logger.info('Event {} received a 409 error indicating the event id already exists.'.format(event_name))
+ logger.info("Event {} received a 409 error indicating the event id already exists.".format(event_name))
else:
- logger.warning('{} request failed for sending xAPI statement of edx event "{}" to {}. '
- 'Response code: {}. Response: {}'.format(response.request.method, event_name, self.URL,
- response.response.code, response.data))
+ logger.warning(
+ '{} request failed for sending xAPI statement of edx event "{}" to {}. '
+ "Response code: {}. Response: {}".format(
+ response.request.method,
+ event_name,
+ self.URL,
+ response.response.code,
+ response.data,
+ )
+ )
raise EventNotDispatched
diff --git a/manage.py b/manage.py
index e1d35a46..2cad2941 100644
--- a/manage.py
+++ b/manage.py
@@ -8,8 +8,8 @@
PWD = os.path.abspath(os.path.dirname(__file__))
-if __name__ == '__main__':
- os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_settings')
+if __name__ == "__main__":
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
sys.path.append(PWD)
try:
from django.core.management import execute_from_command_line
diff --git a/setup.cfg b/setup.cfg
index 5fdd2a45..1e323d7c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -11,6 +11,7 @@ indent = ' '
line_length = 120
multi_line_output = 3
skip_glob = migrations
+profile = black
[wheel]
universal = 1
diff --git a/setup.py b/setup.py
index 4f068d2c..de480d31 100644
--- a/setup.py
+++ b/setup.py
@@ -18,12 +18,11 @@ def get_version(*file_paths):
version string
"""
filename = os.path.join(os.path.dirname(__file__), *file_paths)
- version_file = open(filename, encoding='utf-8').read() # pylint: disable=consider-using-with
- version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
- version_file, re.M)
+ version_file = open(filename, encoding="utf-8").read() # pylint: disable=consider-using-with
+ version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
- raise RuntimeError('Unable to find version string.')
+ raise RuntimeError("Unable to find version string.")
def load_requirements(*requirements_paths):
@@ -48,14 +47,14 @@ def check_name_consistent(package):
with extras we don't constrain it without mentioning the extras (since
that too would interfere with matching constraints.)
"""
- canonical = package.lower().replace('_', '-').split('[')[0]
+ canonical = package.lower().replace("_", "-").split("[")[0]
seen_spelling = by_canonical_name.get(canonical)
if seen_spelling is None:
by_canonical_name[canonical] = package
elif seen_spelling != package:
raise Exception(
f'Encountered both "{seen_spelling}" and "{package}" in requirements '
- 'and constraints files; please use just one or the other.'
+ "and constraints files; please use just one or the other."
)
requirements = {}
@@ -65,8 +64,7 @@ def check_name_consistent(package):
re_package_name_base_chars = r"a-zA-Z0-9\-_." # chars allowed in base package name
# Two groups: name[maybe,extras], and optionally a constraint
requirement_line_regex = re.compile(
- r"([%s]+(?:\[[%s,\s]+\])?)([<>=][^#\s]+)?"
- % (re_package_name_base_chars, re_package_name_base_chars)
+ r"([%s]+(?:\[[%s,\s]+\])?)([<>=][^#\s]+)?" % (re_package_name_base_chars, re_package_name_base_chars)
)
def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present):
@@ -79,10 +77,12 @@ def add_version_constraint_or_raise(current_line, current_requirements, add_if_n
# It's fine to add constraints to an unconstrained package,
# but raise an error if there are already constraints in place.
if existing_version_constraints and existing_version_constraints != version_constraints:
- raise BaseException(f'Multiple constraint definitions found for {package}:'
- f' "{existing_version_constraints}" and "{version_constraints}".'
- f'Combine constraints into one location with {package}'
- f'{existing_version_constraints},{version_constraints}.')
+ raise BaseException(
+ f"Multiple constraint definitions found for {package}:"
+ f' "{existing_version_constraints}" and "{version_constraints}".'
+ f"Combine constraints into one location with {package}"
+ f"{existing_version_constraints},{version_constraints}."
+ )
if add_if_not_present or package in current_requirements:
current_requirements[package] = version_constraints
@@ -93,8 +93,8 @@ def add_version_constraint_or_raise(current_line, current_requirements, add_if_n
for line in reqs:
if is_requirement(line):
add_version_constraint_or_raise(line, requirements, True)
- if line and line.startswith('-c') and not line.startswith('-c http'):
- constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip())
+ if line and line.startswith("-c") and not line.startswith("-c http"):
+ constraint_files.add(os.path.dirname(path) + "/" + line.split("#")[0].replace("-c", "").strip())
# process constraint files: add constraints to existing requirements
for constraint_file in constraint_files:
@@ -118,50 +118,54 @@ def is_requirement(line):
"""
# UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why
- return line and line.strip() and not line.startswith(('-r', '#', '-e', 'git+', '-c'))
+ return line and line.strip() and not line.startswith(("-r", "#", "-e", "git+", "-c"))
-VERSION = get_version('event_routing_backends', '__init__.py')
+VERSION = get_version("event_routing_backends", "__init__.py")
-if sys.argv[-1] == 'tag':
+if sys.argv[-1] == "tag":
print("Tagging the version on github:")
os.system("git tag -a %s -m 'version %s'" % (VERSION, VERSION))
os.system("git push --tags")
sys.exit()
-README = open(os.path.join(os.path.dirname(__file__), 'README.rst'), # pylint: disable=consider-using-with
- encoding='utf-8').read()
-CHANGELOG = open(os.path.join(os.path.dirname(__file__), 'CHANGELOG.rst'), # pylint: disable=consider-using-with
- encoding='utf-8').read()
+README = open( # pylint: disable=consider-using-with
+ os.path.join(os.path.dirname(__file__), "README.rst"),
+ encoding="utf-8",
+).read()
+CHANGELOG = open( # pylint: disable=consider-using-with
+ os.path.join(os.path.dirname(__file__), "CHANGELOG.rst"),
+ encoding="utf-8",
+).read()
setup(
- name='edx-event-routing-backends',
+ name="edx-event-routing-backends",
version=VERSION,
description="""Various backends for receiving edX LMS events.""",
- long_description=README + '\n\n' + CHANGELOG,
- long_description_content_type='text/x-rst',
- author='edX',
- author_email='oscm@edx.org',
- url='https://github.com/openedx/event-routing-backends',
+ long_description=README + "\n\n" + CHANGELOG,
+ long_description_content_type="text/x-rst",
+ author="edX",
+ author_email="oscm@edx.org",
+ url="https://github.com/openedx/event-routing-backends",
packages=[
- 'event_routing_backends',
+ "event_routing_backends",
],
include_package_data=True,
- install_requires=load_requirements('requirements/base.in'),
+ install_requires=load_requirements("requirements/base.in"),
python_requires=">=3.11",
license="AGPL 3.0",
zip_safe=False,
- keywords='Python edx',
+ keywords="Python edx",
classifiers=[
- 'Development Status :: 3 - Alpha',
- 'Framework :: Django',
- 'Framework :: Django :: 4.2',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
- 'Natural Language :: English',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.11',
- 'Programming Language :: Python :: 3.12',
+ "Development Status :: 3 - Alpha",
+ "Framework :: Django",
+ "Framework :: Django :: 4.2",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
+ "Natural Language :: English",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
],
entry_points={
"lms.djangoapp": [
@@ -170,5 +174,5 @@ def is_requirement(line):
"cms.djangoapp": [
"event_routing_backends = event_routing_backends.apps:EventRoutingBackendsConfig",
],
- }
+ },
)
diff --git a/test_settings.py b/test_settings.py
index 8c1f586b..7551202b 100644
--- a/test_settings.py
+++ b/test_settings.py
@@ -18,35 +18,35 @@ def root(*args):
DATABASES = {
- 'default': {
- 'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': 'default.db',
- 'USER': '',
- 'PASSWORD': '',
- 'HOST': '',
- 'PORT': '',
+ "default": {
+ "ENGINE": "django.db.backends.sqlite3",
+ "NAME": "default.db",
+ "USER": "",
+ "PASSWORD": "",
+ "HOST": "",
+ "PORT": "",
}
}
INSTALLED_APPS = (
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'event_routing_backends',
- 'celery_utils',
+ "django.contrib.auth",
+ "django.contrib.contenttypes",
+ "event_routing_backends",
+ "celery_utils",
)
LOCALE_PATHS = [
- root('event_routing_backends', 'conf', 'locale'),
+ root("event_routing_backends", "conf", "locale"),
]
-SECRET_KEY = 'insecure-secret-key'
-LMS_ROOT_URL = 'http://localhost:18000'
+SECRET_KEY = "insecure-secret-key"
+LMS_ROOT_URL = "http://localhost:18000"
CELERY_ALWAYS_EAGER = True
XAPI_EVENTS_ENABLED = True
XAPI_EVENT_LOGGING_ENABLED = False
RUNNING_WITH_TEST_SETTINGS = True
EVENT_TRACKING_BACKENDS = {}
-XAPI_AGENT_IFI_TYPE = 'external_id'
+XAPI_AGENT_IFI_TYPE = "external_id"
EVENT_ROUTING_BACKEND_BATCHING_ENABLED = False
EVENT_ROUTING_BACKEND_BATCH_INTERVAL = 100
EVENT_TRACKING_ENABLED = True
diff --git a/test_utils/__init__.py b/test_utils/__init__.py
index 2d809cd0..6a8f49d5 100644
--- a/test_utils/__init__.py
+++ b/test_utils/__init__.py
@@ -8,6 +8,7 @@
So this package is the place to put them.
"""
+
import sys
from unittest import mock
@@ -18,36 +19,39 @@ def _mock_third_party_modules():
"""
# mock external_user_ids module
external_id = mock.MagicMock()
- external_id.external_user_id = '32e08e30-f8ae-4ce2-94a8-c2bfe38a70cb'
+ external_id.external_user_id = "32e08e30-f8ae-4ce2-94a8-c2bfe38a70cb"
external_user_ids_module = mock.MagicMock()
- external_user_ids_module.ExternalId.add_new_user_id.return_value = (external_id, True)
- external_user_ids_module.ExternalIdType.XAPI = 'xapi'
- sys.modules['openedx.core.djangoapps.external_user_ids.models'] = external_user_ids_module
+ external_user_ids_module.ExternalId.add_new_user_id.return_value = (
+ external_id,
+ True,
+ )
+ external_user_ids_module.ExternalIdType.XAPI = "xapi"
+ sys.modules["openedx.core.djangoapps.external_user_ids.models"] = external_user_ids_module
# mock course
mocked_course = {
- 'display_name': 'Demonstration Course',
+ "display_name": "Demonstration Course",
}
mocked_courses = mock.MagicMock()
mocked_courses.get_course_overviews.return_value = [mocked_course]
- sys.modules['openedx.core.djangoapps.content.course_overviews.api'] = mocked_courses
+ sys.modules["openedx.core.djangoapps.content.course_overviews.api"] = mocked_courses
# mock opaque keys module
mocked_keys = mock.MagicMock()
- sys.modules['opaque_keys.edx.keys'] = mocked_keys
+ sys.modules["opaque_keys.edx.keys"] = mocked_keys
# mock retired user
mocked_user = mock.MagicMock()
- mocked_user.username = 'edx_retired'
- mocked_user.email = 'edx_retired@example.com'
+ mocked_user.username = "edx_retired"
+ mocked_user.email = "edx_retired@example.com"
mocked_models = mock.MagicMock()
mocked_models.get_potentially_retired_user_by_username.return_value = mocked_user
- sys.modules['common.djangoapps.student.models'] = mocked_models
+ sys.modules["common.djangoapps.student.models"] = mocked_models
def mocked_course_reverse(_, kwargs):
"""
Return the reverse method to return course root URL.
"""
- return '/courses/{}/'.format(kwargs.get('course_id'))
+ return "/courses/{}/".format(kwargs.get("course_id"))
diff --git a/tox.ini b/tox.ini
index 1903935c..6459d0fb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,6 +6,7 @@ ignore = D001
[pycodestyle]
exclude = .git,.tox,migrations
+ignore = E203,E701,W503
max-line-length = 120
[pydocstyle]
@@ -66,6 +67,7 @@ allowlist_externals =
make
rm
touch
+ black
deps =
-r{toxinidir}/requirements/quality.txt
commands =
@@ -73,6 +75,7 @@ commands =
pycodestyle event_routing_backends manage.py setup.py
pydocstyle event_routing_backends manage.py setup.py
isort --check-only --diff test_utils event_routing_backends manage.py setup.py test_settings.py
+ black --check --diff test_utils event_routing_backends manage.py setup.py test_settings.py
make selfcheck
[testenv:pii_check]