Merge pull request #5 from jredrejo/import_per_facility

Export summary and session logs per facility
jonboiser authored Apr 13, 2020
2 parents 68fe2b8 + e0731f1 commit 962622d
Showing 7 changed files with 177 additions and 52 deletions.
30 changes: 30 additions & 0 deletions kolibri/core/auth/constants/commands_errors.py
@@ -0,0 +1,30 @@
from django.utils.translation import gettext_lazy as _

# Error messages ###
UNEXPECTED_EXCEPTION = 0
TOO_LONG = 1
INVALID = 2
DUPLICATED_USERNAME = 3
INVALID_USERNAME = 4
REQUIRED_COLUMN = 5
INVALID_HEADER = 6
NO_FACILITY = 7
FILE_READ_ERROR = 8
FILE_WRITE_ERROR = 9

MESSAGES = {
UNEXPECTED_EXCEPTION: _("Unexpected exception [{}]: {}"),
TOO_LONG: _("'{}' is too long"),
INVALID: _("Not a valid '{}'"),
DUPLICATED_USERNAME: _("Duplicated Username"),
INVALID_USERNAME: _(
"Username can only contain letters, numbers, and underscores"
),
REQUIRED_COLUMN: _("The column '{}' is required"),
INVALID_HEADER: _("Mix of valid and/or invalid header labels found in first row"),
NO_FACILITY: _(
"No default facility exists, please make sure to provision this device before running this command"
),
FILE_READ_ERROR: _("Error trying to read csv file: {}"),
FILE_WRITE_ERROR: _("Error trying to write csv file: {}"),
}
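
For reference, the numeric constants identify an error class while MESSAGES supplies the translatable text; callers fill the placeholders with str.format. A minimal sketch of the intended usage (the field name and the exception are placeholder values, not part of this commit):

from kolibri.core.auth.constants.commands_errors import FILE_WRITE_ERROR
from kolibri.core.auth.constants.commands_errors import MESSAGES
from kolibri.core.auth.constants.commands_errors import TOO_LONG

# Fill a template with the offending field; gettext_lazy resolves the
# string in the active language when it is rendered.
error_text = MESSAGES[TOO_LONG].format("full_name")
# -> "'full_name' is too long"

try:
    raise IOError("disk full")  # placeholder failure
except IOError as e:
    # Mirrors how exportlogs.py (below) records a CSV write failure.
    overall_error = str(MESSAGES[FILE_WRITE_ERROR].format(e))
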
8 changes: 6 additions & 2 deletions kolibri/core/logger/api_urls.py
@@ -29,14 +29,18 @@

router.urls.append(
url(
r"^downloadcsvfile/(?P<log_type>.*)/$",
r"^downloadcsvfile/(?P<log_type>.*)/(?P<facility_id>.*)/$",
download_csv_file,
name="download_csv_file",
)
)

router.urls.append(
url(r"^exportedlogsinfo/$", exported_logs_info, name="exportedlogsinfo")
url(
r"^exportedlogsinfo/(?P<facility_id>.*)/(?P<facility>.*)/$",
exported_logs_info,
name="exportedlogsinfo",
)
)

urlpatterns = router.urls
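
Both routes now embed the facility in the URL, so callers must supply it when reversing. A sketch using Django's reverse, assuming the routes resolve under the kolibri:core namespace (the frontend code below reverses exportedlogsinfo under that name); the ids and names are placeholders:

from django.urls import reverse

facility_id = "a1b2c3d4e5f60708a1b2c3d4e5f60708"  # placeholder id

# Download the summary-log CSV for one facility.
download_url = reverse(
    "kolibri:core:download_csv_file",
    kwargs={"log_type": "summary", "facility_id": facility_id},
)

# The info endpoint also takes the facility name, which it needs in
# order to build the per-facility filenames.
info_url = reverse(
    "kolibri:core:exportedlogsinfo",
    kwargs={"facility_id": facility_id, "facility": "MyFacility"},
)
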
49 changes: 29 additions & 20 deletions kolibri/core/logger/csv_export.py
@@ -16,13 +16,19 @@

from .models import ContentSessionLog
from .models import ContentSummaryLog
from kolibri.core.auth.models import Facility
from kolibri.core.content.models import ChannelMetadata
from kolibri.core.content.models import ContentNode
from kolibri.utils import conf


logger = logging.getLogger(__name__)

CSV_EXPORT_FILENAMES = {
"session": "{}_{}_content_session_logs.csv",
"summary": "{}_{}_content_summary_logs.csv",
}


def cache_channel_name(obj):
channel_id = obj["channel_id"]
@@ -89,7 +95,7 @@ def map_object(obj):
classes_info = {
"session": {
"queryset": ContentSessionLog.objects.all(),
"filename": "content_session_logs.csv",
"filename": CSV_EXPORT_FILENAMES["session"],
"db_columns": (
"user__username",
"user__facility__name",
@@ -104,7 +110,7 @@ def map_object(obj):
},
"summary": {
"queryset": ContentSummaryLog.objects.all(),
"filename": "content_summary_logs.csv",
"filename": CSV_EXPORT_FILENAMES["summary"],
"db_columns": (
"user__username",
"user__facility__name",
@@ -121,7 +127,8 @@ def map_object(obj):
}


def csv_file_generator(log_type, filepath, overwrite=False):
def csv_file_generator(facility, log_type, filepath, overwrite=False):

if log_type not in ("summary", "session"):
raise ValueError(
"Impossible to create a csv export file for {}".format(log_type)
@@ -131,7 +138,7 @@ def csv_file_generator(log_type, filepath, overwrite=False):

if not overwrite and os.path.exists(filepath):
raise ValueError("{} already exists".format(filepath))
queryset = log_info["queryset"]
queryset = log_info["queryset"].filter(dataset_id=facility.dataset_id)

# Exclude completion timestamp for the sessionlog CSV
header_labels = tuple(
@@ -156,21 +163,19 @@ def csv_file_generator(log_type, filepath, overwrite=False):
yield


def exported_logs_info(request):
def exported_logs_info(request, facility_id, facility):
"""
Get the last modification timestamp of the exported logs
:returns: An object with the files' information
"""

logs_dir = os.path.join(conf.KOLIBRI_HOME, "log_export")
csv_statuses = {}
csv_export_filenames = {
"session": "content_session_logs.csv",
"summary": "content_summary_logs.csv",
}
for log_type in csv_export_filenames.keys():
log_path = os.path.join(logs_dir, csv_export_filenames[log_type])

for log_type in CSV_EXPORT_FILENAMES.keys():
log_path = os.path.join(
logs_dir, CSV_EXPORT_FILENAMES[log_type].format(facility, facility_id[:4])
)
if os.path.exists(log_path):
csv_statuses[log_type] = os.path.getmtime(log_path)
else:
@@ -179,14 +184,18 @@ def exported_logs_info(request):
return HttpResponse(json.dumps(csv_statuses), content_type="application/json")


def download_csv_file(request, log_type):
csv_export_filenames = {
"session": "content_session_logs.csv",
"summary": "content_summary_logs.csv",
}
if log_type in csv_export_filenames.keys():
def download_csv_file(request, log_type, facility_id):
if facility_id:
facility_name = Facility.objects.get(pk=facility_id).name
else:
facility_name = request.user.facility.name
facility_id = request.user.facility.id

if log_type in CSV_EXPORT_FILENAMES.keys():
filepath = os.path.join(
conf.KOLIBRI_HOME, "log_export", csv_export_filenames[log_type]
conf.KOLIBRI_HOME,
"log_export",
CSV_EXPORT_FILENAMES[log_type].format(facility_name, facility_id[:4]),
)
else:
filepath = None
@@ -202,7 +211,7 @@ def download_csv_file(request, log_type):

# set the content-disposition as attachment to force download
response["Content-Disposition"] = "attachment; filename={}".format(
csv_export_filenames[log_type]
CSV_EXPORT_FILENAMES[log_type].format(facility_name, facility_id[:4])
)

# set the content-length to the file size
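
The net effect of the changes above: exports are namespaced per facility. The first placeholder in CSV_EXPORT_FILENAMES takes the facility name and the second the first four characters of the facility id, so two facilities on the same device no longer overwrite each other's files, and csv_file_generator now filters its queryset by the facility's dataset_id so each file contains only that facility's logs. A quick illustration with placeholder values:

from kolibri.core.logger.csv_export import CSV_EXPORT_FILENAMES

facility_name = "MyFacility"  # placeholder values
facility_id = "a1b2c3d4e5f60708a1b2c3d4e5f60708"

filename = CSV_EXPORT_FILENAMES["summary"].format(facility_name, facility_id[:4])
# -> "MyFacility_a1b2_content_summary_logs.csv"
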
92 changes: 74 additions & 18 deletions kolibri/core/logger/management/commands/exportlogs.py
@@ -1,10 +1,19 @@
import logging
import ntpath
import os
import sys

from django.conf import settings
from django.core.management.base import CommandError
from django.utils import translation

from kolibri.core.auth.constants.commands_errors import FILE_WRITE_ERROR
from kolibri.core.auth.constants.commands_errors import MESSAGES
from kolibri.core.auth.constants.commands_errors import NO_FACILITY
from kolibri.core.auth.models import Facility
from kolibri.core.logger.csv_export import classes_info
from kolibri.core.logger.csv_export import csv_file_generator
from kolibri.core.tasks.management.commands.base import AsyncCommand
from kolibri.core.tasks.utils import get_current_job

logger = logging.getLogger(__name__)

@@ -37,29 +46,76 @@ def add_arguments(self, parser):
default=False,
help="Allows overwritten of the exported file in case it exists",
)
parser.add_argument(
"--facility",
action="store",
type=str,
help="Facility id to import the users into",
)
parser.add_argument(
"--locale",
action="store",
type=str,
default=None,
help="Code of the language for the messages to be translated",
)

def get_facility(self, options):
if options["facility"]:
default_facility = Facility.objects.get(pk=options["facility"])
else:
default_facility = Facility.get_default_facility()

return default_facility

def handle_async(self, *args, **options):
log_type = options["log_type"]

log_info = classes_info[log_type]
# set language for the translation of the messages
locale = settings.LANGUAGE_CODE if not options["locale"] else options["locale"]
translation.activate(locale)

self.overall_error = ""
job = get_current_job()

facility = self.get_facility(options)
if not facility:
self.overall_error = str(MESSAGES[NO_FACILITY])

if options["output_file"] is None:
filename = log_info["filename"]
else:
filename = options["output_file"]
log_type = options["log_type"]

filepath = os.path.join(os.getcwd(), filename)
log_info = classes_info[log_type]

queryset = log_info["queryset"]
if options["output_file"] is None:
filename = log_info["filename"].format(facility.name, facility.id[:4])
else:
filename = options["output_file"]

total_rows = queryset.count()
filepath = os.path.join(os.getcwd(), filename)

queryset = log_info["queryset"]

total_rows = queryset.count()

with self.start_progress(total=total_rows) as progress_update:
try:
for row in csv_file_generator(
facility, log_type, filepath, overwrite=options["overwrite"]
):
progress_update(1)
except (ValueError, IOError) as e:
self.overall_error = str(MESSAGES[FILE_WRITE_ERROR].format(e))

if job:
job.extra_metadata["overall_error"] = self.overall_error
job.extra_metadata["filename"] = ntpath.basename(filepath)
job.save_meta()
else:
if self.overall_error:
raise CommandError(self.overall_error)
else:
logger.info(
"Created csv file {} with {} lines".format(filepath, total_rows)
)

with self.start_progress(total=total_rows) as progress_update:
try:
for row in csv_file_generator(
log_type, filepath, overwrite=options["overwrite"]
):
progress_update(1)
except (ValueError, IOError) as e:
logger.error("Error trying to write csv file: {}".format(e))
sys.exit(1)
translation.deactivate()
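
The task API in the next file drives this command through Django's call_command with exactly the arguments added here; the same invocation also works directly, which is handy for testing. A sketch with placeholder values:

from django.core.management import call_command

call_command(
    "exportlogs",
    log_type="summary",
    output_file="/tmp/MyFacility_a1b2_content_summary_logs.csv",  # placeholder path
    facility="a1b2c3d4e5f60708a1b2c3d4e5f60708",  # placeholder facility id
    overwrite=True,
)
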
26 changes: 19 additions & 7 deletions kolibri/core/tasks/api.py
@@ -13,6 +13,7 @@
from rest_framework.response import Response
from six import string_types

from kolibri.core.auth.models import Facility
from kolibri.core.content.models import ChannelMetadata
from kolibri.core.content.permissions import CanExportLogs
from kolibri.core.content.permissions import CanManageContent
@@ -23,6 +24,7 @@
from kolibri.core.content.utils.paths import get_content_database_file_path
from kolibri.core.content.utils.upgrade import diff_stats
from kolibri.core.discovery.models import NetworkLocation
from kolibri.core.logger.csv_export import CSV_EXPORT_FILENAMES
from kolibri.core.tasks.exceptions import JobNotFound
from kolibri.core.tasks.exceptions import UserCancelledError
from kolibri.core.tasks.job import State
@@ -579,14 +581,19 @@ def startexportlogcsv(self, request):
:returns: An object with the job information
"""
csv_export_filenames = {
"session": "content_session_logs.csv",
"summary": "content_summary_logs.csv",
}
facility_id = request.data.get("facility", None)
if facility_id:
facility = Facility.objects.get(pk=facility_id)
else:
facility = request.user.facility

log_type = request.data.get("logtype", "summary")
if log_type in csv_export_filenames.keys():
if log_type in CSV_EXPORT_FILENAMES.keys():
logs_dir = os.path.join(conf.KOLIBRI_HOME, "log_export")
filepath = os.path.join(logs_dir, csv_export_filenames[log_type])
filepath = os.path.join(
logs_dir,
CSV_EXPORT_FILENAMES[log_type].format(facility.name, facility_id[:4]),
)
else:
raise Http404(
"Impossible to create a csv export file for {}".format(log_type)
@@ -598,13 +605,18 @@
"EXPORTSUMMARYLOGCSV" if log_type == "summary" else "EXPORTSESSIONLOGCSV"
)

job_metadata = {"type": job_type, "started_by": request.user.pk}
job_metadata = {
"type": job_type,
"started_by": request.user.pk,
"facility": facility.id,
}

job_id = priority_queue.enqueue(
call_command,
"exportlogs",
log_type=log_type,
output_file=filepath,
facility=facility.id,
overwrite="true",
extra_metadata=job_metadata,
track_progress=True,
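
From the client's point of view the change is one extra field, facility, in the request body; the job metadata echoes the facility id back so the frontend (next file) can filter tasks per facility. A hypothetical call using the requests library, assuming the viewset is mounted at /api/tasks/tasks/ (the path and the authentication handling are assumptions, not part of this diff):

import requests

session_cookies = {}  # placeholder; a real call needs an authenticated session

response = requests.post(
    "http://localhost:8080/api/tasks/tasks/startexportlogcsv/",  # assumed path
    json={"logtype": "session", "facility": "a1b2c3d4e5f60708a1b2c3d4e5f60708"},
    cookies=session_cookies,
)
job = response.json()  # extra metadata includes the facility id
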
13 changes: 10 additions & 3 deletions kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js
@@ -9,6 +9,7 @@ const logging = logger.getLogger(__filename);
function startCSVExport(store, logtype, creating, commitStart) {
const params = {
logtype: logtype,
facility: store.rootGetters.activeFacilityId,
};
if (!creating) {
let promise = TaskResource.startexportlogcsv(params);
@@ -39,7 +40,10 @@ function startSessionCSVExport(store) {

function getExportedLogsInfo(store) {
return client({
path: urls['kolibri:core:exportedlogsinfo'](),
path: urls['kolibri:core:exportedlogsinfo'](
store.rootGetters.activeFacilityId,
store.rootGetters.currentFacilityName
),
}).then(response => {
const data = response.entity;
let sessionTimeStamp = null;
@@ -56,16 +60,19 @@
}

function checkTaskStatus(store, newTasks, taskType, taskId, commitStart, commitFinish) {
const myNewTasks = newTasks.filter(task => {
return task.facility === store.rootGetters.activeFacilityId;
});
// if task job has already been fetched, just continually check if its completed
if (taskId) {
const task = newTasks.find(task => task.id === taskId);
const task = myNewTasks.find(task => task.id === taskId);

if (task && task.status === TaskStatuses.COMPLETED) {
store.commit(commitFinish, new Date());
TaskResource.deleteFinishedTask(taskId);
}
} else {
const running = newTasks.filter(task => {
const running = myNewTasks.filter(task => {
return (
task.type === taskType &&
task.status !== TaskStatuses.COMPLETED &&