diff --git a/kolibri/core/auth/constants/commands_errors.py b/kolibri/core/auth/constants/commands_errors.py new file mode 100644 index 00000000000..ec447ee42f1 --- /dev/null +++ b/kolibri/core/auth/constants/commands_errors.py @@ -0,0 +1,30 @@ +from django.utils.translation import gettext_lazy as _ + +# Error messages ### +UNEXPECTED_EXCEPTION = 0 +TOO_LONG = 1 +INVALID = 2 +DUPLICATED_USERNAME = 3 +INVALID_USERNAME = 4 +REQUIRED_COLUMN = 5 +INVALID_HEADER = 6 +NO_FACILITY = 7 +FILE_READ_ERROR = 8 +FILE_WRITE_ERROR = 9 + +MESSAGES = { + UNEXPECTED_EXCEPTION: _("Unexpected exception [{}]: {}"), + TOO_LONG: _("'{}' is too long"), + INVALID: _("Not a valid '{}'"), + DUPLICATED_USERNAME: _("Duplicated Username"), + INVALID_USERNAME: _( + "Username only can contain characters, numbers and underscores" + ), + REQUIRED_COLUMN: _("The column '{}' is required"), + INVALID_HEADER: _("Mix of valid and/or invalid header labels found in first row"), + NO_FACILITY: _( + "No default facility exists, please make sure to provision this device before running this command" + ), + FILE_READ_ERROR: _("Error trying to read csv file: {}"), + FILE_WRITE_ERROR: _("Error trying to write csv file: {}"), +} diff --git a/kolibri/core/logger/api_urls.py b/kolibri/core/logger/api_urls.py index 545a2cb4422..2d5b722b4b0 100644 --- a/kolibri/core/logger/api_urls.py +++ b/kolibri/core/logger/api_urls.py @@ -29,14 +29,18 @@ router.urls.append( url( - r"^downloadcsvfile/(?P<log_type>.*)/$", + r"^downloadcsvfile/(?P<log_type>.*)/(?P<facility_id>.*)/$", download_csv_file, name="download_csv_file", ) ) router.urls.append( - url(r"^exportedlogsinfo/$", exported_logs_info, name="exportedlogsinfo") + url( + r"^exportedlogsinfo/(?P<facility_id>.*)/(?P<facility>.*)/$", + exported_logs_info, + name="exportedlogsinfo", + ) ) urlpatterns = router.urls diff --git a/kolibri/core/logger/csv_export.py b/kolibri/core/logger/csv_export.py index b834ecefeaf..ef64a1fc31e 100644 --- a/kolibri/core/logger/csv_export.py +++ b/kolibri/core/logger/csv_export.py @@ -16,6 +16,7 @@ 
from .models import ContentSessionLog from .models import ContentSummaryLog +from kolibri.core.auth.models import Facility from kolibri.core.content.models import ChannelMetadata from kolibri.core.content.models import ContentNode from kolibri.utils import conf @@ -23,6 +24,11 @@ logger = logging.getLogger(__name__) +CSV_EXPORT_FILENAMES = { + "session": "{}_{}_content_session_logs.csv", + "summary": "{}_{}_content_summary_logs.csv", +} + def cache_channel_name(obj): channel_id = obj["channel_id"] @@ -89,7 +95,7 @@ def map_object(obj): classes_info = { "session": { "queryset": ContentSessionLog.objects.all(), - "filename": "content_session_logs.csv", + "filename": CSV_EXPORT_FILENAMES["session"], "db_columns": ( "user__username", "user__facility__name", @@ -104,7 +110,7 @@ def map_object(obj): }, "summary": { "queryset": ContentSummaryLog.objects.all(), - "filename": "content_summary_logs.csv", + "filename": CSV_EXPORT_FILENAMES["summary"], "db_columns": ( "user__username", "user__facility__name", @@ -121,7 +127,8 @@ def map_object(obj): } -def csv_file_generator(log_type, filepath, overwrite=False): +def csv_file_generator(facility, log_type, filepath, overwrite=False): + if log_type not in ("summary", "session"): raise ValueError( "Impossible to create a csv export file for {}".format(log_type) @@ -131,7 +138,7 @@ def csv_file_generator(log_type, filepath, overwrite=False): if not overwrite and os.path.exists(filepath): raise ValueError("{} already exists".format(filepath)) - queryset = log_info["queryset"] + queryset = log_info["queryset"].filter(dataset_id=facility.dataset_id) # Exclude completion timestamp for the sessionlog CSV header_labels = tuple( @@ -156,21 +163,19 @@ def csv_file_generator(log_type, filepath, overwrite=False): yield -def exported_logs_info(request): +def exported_logs_info(request, facility_id, facility): """ Get the last modification timestamp of the summary logs exported :returns: An object with the files informatin """ - logs_dir = 
os.path.join(conf.KOLIBRI_HOME, "log_export") csv_statuses = {} - csv_export_filenames = { - "session": "content_session_logs.csv", - "summary": "content_summary_logs.csv", - } - for log_type in csv_export_filenames.keys(): - log_path = os.path.join(logs_dir, csv_export_filenames[log_type]) + + for log_type in CSV_EXPORT_FILENAMES.keys(): + log_path = os.path.join( + logs_dir, CSV_EXPORT_FILENAMES[log_type].format(facility, facility_id[:4]) + ) if os.path.exists(log_path): csv_statuses[log_type] = os.path.getmtime(log_path) else: @@ -179,14 +184,18 @@ def exported_logs_info(request): return HttpResponse(json.dumps(csv_statuses), content_type="application/json") -def download_csv_file(request, log_type): - csv_export_filenames = { - "session": "content_session_logs.csv", - "summary": "content_summary_logs.csv", - } - if log_type in csv_export_filenames.keys(): +def download_csv_file(request, log_type, facility_id): + if facility_id: + facility_name = Facility.objects.get(pk=facility_id).name + else: + facility_name = request.user.facility.name + facility_id = request.user.facility.id + + if log_type in CSV_EXPORT_FILENAMES.keys(): filepath = os.path.join( - conf.KOLIBRI_HOME, "log_export", csv_export_filenames[log_type] + conf.KOLIBRI_HOME, + "log_export", + CSV_EXPORT_FILENAMES[log_type].format(facility_name, facility_id[:4]), ) else: filepath = None @@ -202,7 +211,7 @@ def download_csv_file(request, log_type): # set the content-disposition as attachment to force download response["Content-Disposition"] = "attachment; filename={}".format( - csv_export_filenames[log_type] + CSV_EXPORT_FILENAMES[log_type].format(facility_name, facility_id[:4]) ) # set the content-length to the file size diff --git a/kolibri/core/logger/management/commands/exportlogs.py b/kolibri/core/logger/management/commands/exportlogs.py index 4920e2938d8..2a2c2286eab 100644 --- a/kolibri/core/logger/management/commands/exportlogs.py +++ b/kolibri/core/logger/management/commands/exportlogs.py @@ 
-1,10 +1,19 @@ import logging +import ntpath import os -import sys +from django.conf import settings +from django.core.management.base import CommandError +from django.utils import translation + +from kolibri.core.auth.constants.commands_errors import FILE_WRITE_ERROR +from kolibri.core.auth.constants.commands_errors import MESSAGES +from kolibri.core.auth.constants.commands_errors import NO_FACILITY +from kolibri.core.auth.models import Facility from kolibri.core.logger.csv_export import classes_info from kolibri.core.logger.csv_export import csv_file_generator from kolibri.core.tasks.management.commands.base import AsyncCommand +from kolibri.core.tasks.utils import get_current_job logger = logging.getLogger(__name__) @@ -37,29 +46,76 @@ def add_arguments(self, parser): default=False, help="Allows overwritten of the exported file in case it exists", ) + parser.add_argument( + "--facility", + action="store", + type=str, + help="Facility id to export the logs for", + ) + parser.add_argument( + "--locale", + action="store", + type=str, + default=None, + help="Code of the language for the messages to be translated", + ) + + def get_facility(self, options): + if options["facility"]: + default_facility = Facility.objects.get(pk=options["facility"]) + else: + default_facility = Facility.get_default_facility() + + return default_facility def handle_async(self, *args, **options): - log_type = options["log_type"] - log_info = classes_info[log_type] + # set language for the translation of the messages + locale = settings.LANGUAGE_CODE if not options["locale"] else options["locale"] + translation.activate(locale) + + self.overall_error = "" + job = get_current_job() + + facility = self.get_facility(options) + if not facility: + self.overall_error = str(MESSAGES[NO_FACILITY]) - if options["output_file"] is None: - filename = log_info["filename"] else: - filename = options["output_file"] + log_type = options["log_type"] - filepath = os.path.join(os.getcwd(), filename) + 
log_info = classes_info[log_type] - queryset = log_info["queryset"] + if options["output_file"] is None: + filename = log_info["filename"].format(facility.name, facility.id[:4]) + else: + filename = options["output_file"] - total_rows = queryset.count() + filepath = os.path.join(os.getcwd(), filename) + + queryset = log_info["queryset"] + + total_rows = queryset.count() + + with self.start_progress(total=total_rows) as progress_update: + try: + for row in csv_file_generator( + facility, log_type, filepath, overwrite=options["overwrite"] + ): + progress_update(1) + except (ValueError, IOError) as e: + self.overall_error = str(MESSAGES[FILE_WRITE_ERROR].format(e)) + + if job: + job.extra_metadata["overall_error"] = self.overall_error + job.extra_metadata["filename"] = ntpath.basename(filepath) + job.save_meta() + else: + if self.overall_error: + raise CommandError(self.overall_error) + else: + logger.info( + "Created csv file {} with {} lines".format(filepath, total_rows) + ) - with self.start_progress(total=total_rows) as progress_update: - try: - for row in csv_file_generator( - log_type, filepath, overwrite=options["overwrite"] - ): - progress_update(1) - except (ValueError, IOError) as e: - logger.error("Error trying to write csv file: {}".format(e)) - sys.exit(1) + translation.deactivate() diff --git a/kolibri/core/tasks/api.py b/kolibri/core/tasks/api.py index 901a0e1d9fb..62aab6601c3 100644 --- a/kolibri/core/tasks/api.py +++ b/kolibri/core/tasks/api.py @@ -13,6 +13,7 @@ from rest_framework.response import Response from six import string_types +from kolibri.core.auth.models import Facility from kolibri.core.content.models import ChannelMetadata from kolibri.core.content.permissions import CanExportLogs from kolibri.core.content.permissions import CanManageContent @@ -23,6 +24,7 @@ from kolibri.core.content.utils.paths import get_content_database_file_path from kolibri.core.content.utils.upgrade import diff_stats from kolibri.core.discovery.models import 
NetworkLocation +from kolibri.core.logger.csv_export import CSV_EXPORT_FILENAMES from kolibri.core.tasks.exceptions import JobNotFound from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.job import State @@ -579,14 +581,19 @@ def startexportlogcsv(self, request): :returns: An object with the job information """ - csv_export_filenames = { - "session": "content_session_logs.csv", - "summary": "content_summary_logs.csv", - } + facility_id = request.data.get("facility", None) + if facility_id: + facility = Facility.objects.get(pk=facility_id) + else: + facility = request.user.facility + log_type = request.data.get("logtype", "summary") - if log_type in csv_export_filenames.keys(): + if log_type in CSV_EXPORT_FILENAMES.keys(): logs_dir = os.path.join(conf.KOLIBRI_HOME, "log_export") - filepath = os.path.join(logs_dir, csv_export_filenames[log_type]) + filepath = os.path.join( + logs_dir, + CSV_EXPORT_FILENAMES[log_type].format(facility.name, facility.id[:4]), + ) else: raise Http404( "Impossible to create a csv export file for {}".format(log_type) @@ -598,13 +605,18 @@ "EXPORTSUMMARYLOGCSV" if log_type == "summary" else "EXPORTSESSIONLOGCSV" ) - job_metadata = {"type": job_type, "started_by": request.user.pk} + job_metadata = { + "type": job_type, + "started_by": request.user.pk, + "facility": facility.id, + } job_id = priority_queue.enqueue( call_command, "exportlogs", log_type=log_type, output_file=filepath, + facility=facility.id, overwrite="true", extra_metadata=job_metadata, track_progress=True, diff --git a/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js b/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js index 5401f5acc7f..8ca828f1980 100644 --- a/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js +++ b/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js @@ -9,6 +9,7 @@ const logging = logger.getLogger(__filename); function 
startCSVExport(store, logtype, creating, commitStart) { const params = { logtype: logtype, + facility: store.rootGetters.activeFacilityId, }; if (!creating) { let promise = TaskResource.startexportlogcsv(params); @@ -39,7 +40,10 @@ function startSessionCSVExport(store) { function getExportedLogsInfo(store) { return client({ - path: urls['kolibri:core:exportedlogsinfo'](), + path: urls['kolibri:core:exportedlogsinfo']( + store.rootGetters.activeFacilityId, + store.rootGetters.currentFacilityName + ), }).then(response => { const data = response.entity; let sessionTimeStamp = null; @@ -56,16 +60,19 @@ function getExportedLogsInfo(store) { } function checkTaskStatus(store, newTasks, taskType, taskId, commitStart, commitFinish) { + const myNewTasks = newTasks.filter(task => { + return task.facility === store.rootGetters.activeFacilityId; + }); // if task job has already been fetched, just continually check if its completed if (taskId) { - const task = newTasks.find(task => task.id === taskId); + const task = myNewTasks.find(task => task.id === taskId); if (task && task.status === TaskStatuses.COMPLETED) { store.commit(commitFinish, new Date()); TaskResource.deleteFinishedTask(taskId); } } else { - const running = newTasks.filter(task => { + const running = myNewTasks.filter(task => { return ( task.type === taskType && task.status !== TaskStatuses.COMPLETED && diff --git a/kolibri/plugins/facility/assets/src/views/DataPage/index.vue b/kolibri/plugins/facility/assets/src/views/DataPage/index.vue index 73a099fe472..721d7ffec83 100644 --- a/kolibri/plugins/facility/assets/src/views/DataPage/index.vue +++ b/kolibri/plugins/facility/assets/src/views/DataPage/index.vue @@ -116,6 +116,7 @@ 'availableSessionCSVLog', 'availableSummaryCSVLog', ]), + ...mapGetters(['activeFacilityId']), ...mapState('manageCSV', ['sessionDateCreated', 'summaryDateCreated']), cannotDownload() { return isEmbeddedWebView; @@ -170,10 +171,16 @@ } }, downloadSessionLog() { - 
window.open(urls['kolibri:core:download_csv_file']('session'), '_blank'); + window.open( + urls['kolibri:core:download_csv_file']('session', this.activeFacilityId), + '_blank' + ); }, downloadSummaryLog() { - window.open(urls['kolibri:core:download_csv_file']('summary'), '_blank'); + window.open( + urls['kolibri:core:download_csv_file']('summary', this.activeFacilityId), + '_blank' + ); }, }, $trs: {