diff --git a/app/cp_nigeria/urls.py b/app/cp_nigeria/urls.py
index ff932cc3..fdffe3f1 100644
--- a/app/cp_nigeria/urls.py
+++ b/app/cp_nigeria/urls.py
@@ -35,4 +35,5 @@
     path("ajax//cpn_kpi_results", cpn_kpi_results, name="cpn_kpi_results"),
     path("simulation/cancel/", cpn_simulation_cancel, name="cpn_simulation_cancel"),
     path("simulation/request/", cpn_simulation_request, name="cpn_simulation_request"),
+    path("kobo-testing", kobo_testing, name="kobo_testing"),
 ]
diff --git a/app/cp_nigeria/views.py b/app/cp_nigeria/views.py
index f924e4e8..1a1e7292 100644
--- a/app/cp_nigeria/views.py
+++ b/app/cp_nigeria/views.py
@@ -1268,6 +1268,16 @@ def cpn_outputs(request, proj_id, step_id=STEP_MAPPING["outputs"], complex=False
     return render(request, html_template, context)
 
 
+@login_required
+@require_http_methods(["GET", "POST"])
+def kobo_testing(request):
+    if request.method == "GET":
+        return render(request, "kobo_testing.html")
+
+    else:
+        return HttpResponse("Only GET requests are allowed for this view.")
+
+
 # TODO for later create those views instead of simply serving the html templates
 CPN_STEPS = {
     "choose_location": cpn_scenario_create,
@@ -1601,3 +1611,125 @@ def ajax_download_report(request):
     implementation_plan.save(response)
 
     return response
+
+
+@login_required
+@require_http_methods(["GET", "POST"])
+def calculate_consumption_tiers(request, kobo_file):
+    # this file should correspond to the file created when you export the data from kobotoolbox
+    # TODO set this up to be able to handle both the csv and excel exports
+    survey = pd.read_excel(kobo_file)
+    # TODO create csv for the IWI scores that will be saved in the static folder, include bool column for multiple choice
+    IWI_scores = pd.read_excel(IWI_scores_file)
+
+    # define question names relevant for IWI scores
+    IWI_questions = list(set([name for name in IWI_scores.kobo_name[1:]]))
+    multiple_choice_qs = list(set([name for name in IWI_scores.loc[IWI_scores.multiple_choice == True].kobo_name]))
+
+    # convert possible survey answers for each IWI score to lists of integers
+    clean_list = [
+        [int(item)] if isinstance(item, (int, float)) else list(map(int, item.split(", ")))
+        for item in IWI_scores.kobo_value.tolist()[1:]
+    ]
+    clean_list.insert(0, np.nan)
+    IWI_scores.kobo_value = clean_list
+
+    # convert multiple choice survey answers to lists of integers
+    for question in multiple_choice_qs:
+        nan_indices = survey.index[survey[question].isna()].tolist()
+        # clean the survey answers if not nan
+        clean_list = [
+            [int(item)] if isinstance(item, (int, float)) else list(map(int, item.split(" ")))
+            for item in survey[question].tolist()
+            if not pd.isna(item)
+        ]
+        # reinsert nan back into the list
+        for index in nan_indices:
+            clean_list.insert(index, np.nan)
+
+        survey[question] = clean_list
+
+    # TODO maybe do this at the very beginning
+    # filter out only the questions relevant for the IWI calculation
+    iwi_df = survey.loc[:, IWI_questions]
+
+    # check if any of the questions are not answered and drop the rows with empty values
+    iwi_df.isnull().T.any()
+    empty_list = np.where(iwi_df.isnull().T.any())[0]
+    iwi_df.drop(empty_list, inplace=True)
+
+    # TODO figure out how to handle this properly
+    if len(empty_list) > 0:
+        print(
+            f"The survey contains unanswered questions in the following rows: {empty_list}. These surveys will not "
+            f"be included in the calculation. Please be aware that missing data will lead to a less reliable estimate."
+        )
+
+    bool_dict = {}
+
+    # create boolean values according to survey results
+    for index, row in IWI_scores.iterrows():
+        if index == 0:
+            continue
+        name = f"{row.kobo_name.replace('_', ' ').title()}: {row.Characteristic}"
+        # bool_list = []
+        if row.kobo_name in multiple_choice_qs:
+            bool_list = iwi_df[row.kobo_name].apply(lambda x: any(answer in x for answer in row.kobo_value))
+        else:
+            bool_list = iwi_df[row.kobo_name].apply(lambda x: x in row.kobo_value)
+
+        bool_dict[name] = bool_list
+
+    # join the expensive utensils categories
+    bool_dict["Expensive utensils"] = (
+        bool_dict["Appliances: Expensive utensils"] | bool_dict["Vehicles: Expensive utensils"]
+    )
+    del bool_dict["Appliances: Expensive utensils"]
+    del bool_dict["Vehicles: Expensive utensils"]
+
+    # create dataframe and move expensive utensils back to its place
+    bool_table = pd.DataFrame(bool_dict)
+    col = bool_table.pop("Expensive utensils")
+    bool_table.insert(6, col.name, col)
+
+    # calculate IWI scores
+    scores = IWI_scores.Weights.tolist()[1:]
+    IWI_constant = IWI_scores.Weights.tolist()[0]
+    scores.pop(7)
+    scores = list(zip(bool_table.columns.tolist(), scores))
+
+    IWI_score = IWI_constant
+    for i in range(len(scores)):
+        IWI_score += bool_table[scores[i][0]] * scores[i][1]
+
+    survey["IWI_score"] = IWI_score
+
+    # construct quintiles and divide scores into them
+    quintiles = [20, 40, 60, 80, 100]
+    iwi_quintiles = {}
+    previous_quintile = 0
+    for quintile in quintiles:
+        iwi_quintiles[quintile] = {}
+        iwi_quintiles[quintile] = len(
+            survey[(survey["IWI_score"] < quintile) & (survey["IWI_score"] > previous_quintile)].IWI_score
+        )
+        previous_quintile = quintile
+
+
+@json_view
+@login_required
+@require_http_methods(["GET", "POST"])
+def upload_survey(request):
+    if request.method == "GET":
+        form = UploadFileForm(labels=dict(file=_("Survey file")))
+        context = {"form": form}
+
+        return render(request, "asset/upload_timeseries.html", context)
+
+    elif request.method == "POST":
+        qs = request.POST
+        form = UploadTimeseriesForm(qs)
+
+        if form.is_valid():
+            ts = form.save(commit=False)
+            ts.user = request.user
diff --git a/app/epa/settings.py b/app/epa/settings.py
index d81f559f..e5334cd4 100644
--- a/app/epa/settings.py
+++ b/app/epa/settings.py
@@ -1,255 +1,259 @@
-"""
-Django settings for EPA project.
-
-Generated by 'django-admin startproject' using Django 3.0.
-
-For more information on this file, see
-https://docs.djangoproject.com/en/3.0/topics/settings/
-
-For the full list of settings and their values, see
-https://docs.djangoproject.com/en/3.0/ref/settings/
-"""
-import ast
-import os
-
-from django.contrib.messages import constants as messages
-
-# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = ast.literal_eval(os.getenv("DEBUG", "False"))
-
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.0/howto/static-files/ -STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) -STATIC_URL = "/static/" -STATIC_ROOT = os.path.join(BASE_DIR, "cdn_static_root") - -STATICFILES_FINDERS = ["django.contrib.staticfiles.finders.FileSystemFinder"] - -if DEBUG is True: - STATICFILES_FINDERS.append("sass_processor.finders.CssFinder") - SASS_PROCESSOR_ROOT = STATIC_ROOT - SASS_PRECISION = 8 -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = os.getenv("EPA_SECRET_KEY", "v@p9^=@lc3#1u_xtx*^xhrv0l3li1(+8ik^k@g-_bzmexb0$7n") - -ALLOWED_HOSTS = ["*"] - -CSRF_TRUSTED_ORIGINS = [ - f"https://{os.getenv('TRUSTED_HOST')}", - f"http://{os.getenv('TRUSTED_HOST')}", -] -# Application definition - -INSTALLED_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "django.contrib.humanize", - "django.forms", - "users.apps.UsersConfig", - "projects.apps.ProjectsConfig", - "dashboard.apps.DashboardConfig", - "cp_nigeria.apps.CPNigeriaConfig", - "business_model.apps.BusinessModelConfig", - # 3rd Party - "crispy_forms", - "django_q", -] - -if DEBUG is True: - INSTALLED_APPS.append("sass_processor") - -MIDDLEWARE = [ - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.locale.LocaleMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -FILE_UPLOAD_HANDLERS = [ - "django.core.files.uploadhandler.MemoryFileUploadHandler", - "django.core.files.uploadhandler.TemporaryFileUploadHandler", -] - -ROOT_URLCONF = "epa.urls" - -FORM_RENDERER = "django.forms.renderers.TemplatesSetting" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [os.path.join(BASE_DIR, "templates")], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - "epa.context_processors.debug", - ] - }, - } -] - -WSGI_APPLICATION = "epa.wsgi.application" - -# Database -# https://docs.djangoproject.com/en/3.0/ref/settings/#databases -# SQLite is used if no other database system is set via environment variables. 
-DATABASES = { - "default": { - "ENGINE": os.environ.get("SQL_ENGINE"), - "NAME": os.environ.get("SQL_DATABASE"), - "USER": os.environ.get("SQL_USER"), - "PASSWORD": os.environ.get("SQL_PASSWORD"), - "HOST": os.environ.get("SQL_HOST"), - "PORT": os.environ.get("SQL_PORT"), - } - if os.environ.get("SQL_ENGINE") - else { - "ENGINE": os.environ.get("SQL_ENGINE", "django.db.backends.sqlite3"), - "NAME": os.environ.get("SQL_DATABASE", os.path.join(BASE_DIR, "db.sqlite3")), - } -} - -DEFAULT_AUTO_FIELD = "django.db.models.AutoField" - -# Password validation -# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, - {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, - {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, - {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, -] - -# Internationalization -# https://docs.djangoproject.com/en/3.0/topics/i18n/ - -LANGUAGE_CODE = "en" - -LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"),) - -LANGUAGES = [("de", "German"), ("en", "English")] - -TIME_ZONE = "Europe/Copenhagen" - -USE_I18N = True - -USE_L10N = True - -USE_TZ = False - -# Other configs - -AUTH_USER_MODEL = "users.CustomUser" - -LOGIN_URL = "login" -LOGIN_REDIRECT_URL = "home_cpn" -LOGOUT_REDIRECT_URL = "home_cpn" - -CRISPY_TEMPLATE_PACK = "bootstrap4" - -# Please note, we don't use Django's internal email system, -# we implement our own, using exchangelib -USE_EXCHANGE_EMAIL_BACKEND = ast.literal_eval(os.getenv("USE_EXCHANGE_EMAIL_BACKEND", "True")) -# The Exchange account which sends emails -EXCHANGE_ACCOUNT = os.getenv("EXCHANGE_ACCOUNT", "dummy@dummy.com") -EXCHANGE_PW = os.getenv("EXCHANGE_PW", "dummypw") -EXCHANGE_EMAIL = os.getenv("EXCHANGE_EMAIL", "dummy@dummy.com") -EXCHANGE_SERVER = os.getenv("EXCHANGE_SERVER", "dummy.com") -# Email addresses to which feedback emails will be sent -RECIPIENTS = os.getenv("RECIPIENTS", "dummy@dummy.com,dummy2@dummy.com").split(",") -EMAIL_SUBJECT_PREFIX = os.getenv("EMAIL_SUBJECT_PREFIX", "[open_plan] ") - -MESSAGE_TAGS = { - messages.DEBUG: "alert-info", - messages.INFO: "alert-info", - messages.SUCCESS: "alert-success", - messages.WARNING: "alert-warning", - messages.ERROR: "alert-danger", -} - -USE_PROXY = ast.literal_eval(os.getenv("USE_PROXY", "True")) -PROXY_ADDRESS_LINK = os.getenv("PROXY_ADDRESS", "http://proxy:port") -PROXY_CONFIG = ({"http://": PROXY_ADDRESS_LINK, "https://": PROXY_ADDRESS_LINK}) if USE_PROXY else ({}) - -MVS_API_HOST = os.getenv("MVS_API_HOST", "https://mvs-eland.rl-institut.de") -MVS_POST_URL = f"{MVS_API_HOST}/sendjson/" -MVS_GET_URL = f"{MVS_API_HOST}/check/" -MVS_LP_FILE_URL = f"{MVS_API_HOST}/get_lp_file/" -MVS_SA_POST_URL = f"{MVS_API_HOST}/sendjson/openplan/sensitivity-analysis" -MVS_SA_GET_URL = f"{MVS_API_HOST}/check-sensitivity-analysis/" - -# Allow iframes to show in page -X_FRAME_OPTIONS = "SAMEORIGIN" - -# API key to fetch exchange rates -EXCHANGE_RATES_API_KEY = os.getenv("EXCHANGE_RATES_API_KEY") -EXCHANGE_RATES_URL = f"https://v6.exchangerate-api.com/v6/{EXCHANGE_RATES_API_KEY}/latest/USD" - -import sys - -LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "dtlnm": { - "format": "%(asctime)s - %(levelname)8s - %(name)s - %(message)s", - "datefmt": "%Y-%m-%d %H:%M:%S", - } - }, - "handlers": { - "info_file": { - "level": "INFO", - "class": "logging.FileHandler", 
- "filename": "django_epa_info.log", - "formatter": "dtlnm", - }, - "warnings_file": { - "level": "WARNING", - "class": "logging.FileHandler", - "filename": "django_epa_warning.log", - "formatter": "dtlnm", - }, - "console": { - "level": "WARNING", - "class": "logging.StreamHandler", - "stream": sys.stdout, - }, - }, - "loggers": { - "": { - "handlers": ["info_file", "warnings_file", "console"], - "level": "DEBUG", - "propagate": True, - }, - "asyncio": {"level": "WARNING"}, - }, -} - -# DJANGO-Q CONFIGURATION -# source: https://django-q.readthedocs.io/en/latest/configure.html -Q_CLUSTER = { - "name": "django_q_orm", - "workers": 4, - "timeout": 90, - "retry": 120, - "queue_limit": 50, - "orm": "default", -} +""" +Django settings for EPA project. + +Generated by 'django-admin startproject' using Django 3.0. + +For more information on this file, see +https://docs.djangoproject.com/en/3.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.0/ref/settings/ +""" +import ast +import os + +from django.contrib.messages import constants as messages + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = ast.literal_eval(os.getenv("DEBUG", "False")) + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.0/howto/static-files/ +STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) +STATIC_URL = "/static/" +STATIC_ROOT = os.path.join(BASE_DIR, "cdn_static_root") + +STATICFILES_FINDERS = ["django.contrib.staticfiles.finders.FileSystemFinder"] + +if DEBUG is True: + STATICFILES_FINDERS.append("sass_processor.finders.CssFinder") + SASS_PROCESSOR_ROOT = STATIC_ROOT + SASS_PRECISION = 8 +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! 
+SECRET_KEY = os.getenv("EPA_SECRET_KEY", "v@p9^=@lc3#1u_xtx*^xhrv0l3li1(+8ik^k@g-_bzmexb0$7n") + +ALLOWED_HOSTS = ["*"] + +CSRF_TRUSTED_ORIGINS = [ + f"https://{os.getenv('TRUSTED_HOST')}", + f"http://{os.getenv('TRUSTED_HOST')}", +] +# Application definition + +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django.contrib.humanize", + "django.forms", + "users.apps.UsersConfig", + "projects.apps.ProjectsConfig", + "dashboard.apps.DashboardConfig", + "cp_nigeria.apps.CPNigeriaConfig", + "business_model.apps.BusinessModelConfig", + # 3rd Party + "crispy_forms", + "django_q", +] + +if DEBUG is True: + INSTALLED_APPS.append("sass_processor") + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.locale.LocaleMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +FILE_UPLOAD_HANDLERS = [ + "django.core.files.uploadhandler.MemoryFileUploadHandler", + "django.core.files.uploadhandler.TemporaryFileUploadHandler", +] + +ROOT_URLCONF = "epa.urls" + +FORM_RENDERER = "django.forms.renderers.TemplatesSetting" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [os.path.join(BASE_DIR, "templates")], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + "epa.context_processors.debug", + ] + }, + } +] + +WSGI_APPLICATION = "epa.wsgi.application" + +# Database +# https://docs.djangoproject.com/en/3.0/ref/settings/#databases +# SQLite is used if no other database system is set via environment variables. 
+DATABASES = { + "default": { + "ENGINE": os.environ.get("SQL_ENGINE"), + "NAME": os.environ.get("SQL_DATABASE"), + "USER": os.environ.get("SQL_USER"), + "PASSWORD": os.environ.get("SQL_PASSWORD"), + "HOST": os.environ.get("SQL_HOST"), + "PORT": os.environ.get("SQL_PORT"), + } + if os.environ.get("SQL_ENGINE") + else { + "ENGINE": os.environ.get("SQL_ENGINE", "django.db.backends.sqlite3"), + "NAME": os.environ.get("SQL_DATABASE", os.path.join(BASE_DIR, "db.sqlite3")), + } +} + +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + +# Password validation +# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, + {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, +] + +# Internationalization +# https://docs.djangoproject.com/en/3.0/topics/i18n/ + +LANGUAGE_CODE = "en" + +LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"),) + +LANGUAGES = [("de", "German"), ("en", "English")] + +TIME_ZONE = "Europe/Copenhagen" + +USE_I18N = True + +USE_L10N = True + +USE_TZ = False + +# Other configs + +AUTH_USER_MODEL = "users.CustomUser" + +LOGIN_URL = "login" +LOGIN_REDIRECT_URL = "home_cpn" +LOGOUT_REDIRECT_URL = "home_cpn" + +CRISPY_TEMPLATE_PACK = "bootstrap4" + +# Please note, we don't use Django's internal email system, +# we implement our own, using exchangelib +USE_EXCHANGE_EMAIL_BACKEND = ast.literal_eval(os.getenv("USE_EXCHANGE_EMAIL_BACKEND", "True")) +# The Exchange account which sends emails +EXCHANGE_ACCOUNT = os.getenv("EXCHANGE_ACCOUNT", "dummy@dummy.com") +EXCHANGE_PW = os.getenv("EXCHANGE_PW", "dummypw") +EXCHANGE_EMAIL = os.getenv("EXCHANGE_EMAIL", "dummy@dummy.com") +EXCHANGE_SERVER = os.getenv("EXCHANGE_SERVER", "dummy.com") +# Email addresses to which feedback emails will be sent +RECIPIENTS = os.getenv("RECIPIENTS", "dummy@dummy.com,dummy2@dummy.com").split(",") +EMAIL_SUBJECT_PREFIX = os.getenv("EMAIL_SUBJECT_PREFIX", "[open_plan] ") + +MESSAGE_TAGS = { + messages.DEBUG: "alert-info", + messages.INFO: "alert-info", + messages.SUCCESS: "alert-success", + messages.WARNING: "alert-warning", + messages.ERROR: "alert-danger", +} + +USE_PROXY = ast.literal_eval(os.getenv("USE_PROXY", "True")) +PROXY_ADDRESS_LINK = os.getenv("PROXY_ADDRESS", "http://proxy:port") +PROXY_CONFIG = ({"http://": PROXY_ADDRESS_LINK, "https://": PROXY_ADDRESS_LINK}) if USE_PROXY else ({}) + +MVS_API_HOST = os.getenv("MVS_API_HOST", "https://mvs-eland.rl-institut.de") +MVS_POST_URL = f"{MVS_API_HOST}/sendjson/" +MVS_GET_URL = f"{MVS_API_HOST}/check/" +MVS_LP_FILE_URL = f"{MVS_API_HOST}/get_lp_file/" +MVS_SA_POST_URL = f"{MVS_API_HOST}/sendjson/openplan/sensitivity-analysis" +MVS_SA_GET_URL = f"{MVS_API_HOST}/check-sensitivity-analysis/" + +# Allow iframes to show in page +X_FRAME_OPTIONS = "SAMEORIGIN" + +# API key to fetch exchange rates +EXCHANGE_RATES_API_KEY = os.getenv("EXCHANGE_RATES_API_KEY") +EXCHANGE_RATES_URL = f"https://v6.exchangerate-api.com/v6/{EXCHANGE_RATES_API_KEY}/latest/USD" + +# API to connect with KoboToolbox +KOBO_API_URL = "https://kf.kobotoolbox.org/api/v2/" +KOBO_API_TOKEN = os.getenv("KOBO_API_TOKEN") + +import sys + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "dtlnm": { + "format": "%(asctime)s - %(levelname)8s - %(name)s - 
%(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", + } + }, + "handlers": { + "info_file": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": "django_epa_info.log", + "formatter": "dtlnm", + }, + "warnings_file": { + "level": "WARNING", + "class": "logging.FileHandler", + "filename": "django_epa_warning.log", + "formatter": "dtlnm", + }, + "console": { + "level": "WARNING", + "class": "logging.StreamHandler", + "stream": sys.stdout, + }, + }, + "loggers": { + "": { + "handlers": ["info_file", "warnings_file", "console"], + "level": "DEBUG", + "propagate": True, + }, + "asyncio": {"level": "WARNING"}, + }, +} + +# DJANGO-Q CONFIGURATION +# source: https://django-q.readthedocs.io/en/latest/configure.html +Q_CLUSTER = { + "name": "django_q_orm", + "workers": 4, + "timeout": 90, + "retry": 120, + "queue_limit": 50, + "orm": "default", +} diff --git a/app/projects/requests.py b/app/projects/requests.py index c7d2214f..2c2b4184 100644 --- a/app/projects/requests.py +++ b/app/projects/requests.py @@ -1,224 +1,356 @@ -from datetime import datetime -import httpx as requests -import json -import numpy as np - -# from requests.exceptions import HTTPError -from epa.settings import PROXY_CONFIG, MVS_POST_URL, MVS_GET_URL, MVS_SA_POST_URL, MVS_SA_GET_URL, EXCHANGE_RATES_URL -from dashboard.models import ( - FancyResults, - AssetsResults, - KPICostsMatrixResults, - KPIScalarResults, - FlowResults, -) -from projects.constants import DONE, PENDING, ERROR -import logging - -logger = logging.getLogger(__name__) - - -def request_exchange_rate(currency): - try: - response = requests.get(EXCHANGE_RATES_URL) - response.raise_for_status() - - except requests.HTTPError as http_err: - logger.info("Current exchange rate could not be fetched. Setting default value.") - exchange_rate = 774 - else: - data = response.json() - exchange_rate = round(data["conversion_rates"][currency], 2) - - return exchange_rate - - -def mvs_simulation_request(data: dict): - headers = {"content-type": "application/json"} - payload = json.dumps(data) - - try: - response = requests.post( - MVS_POST_URL, - data=payload, - headers=headers, - proxies=PROXY_CONFIG, - verify=False, - ) - - # If the response was successful, no Exception will be raised - response.raise_for_status() - except requests.HTTPError as http_err: - logger.error(f"HTTP error occurred: {http_err}") - return None - except Exception as err: - logger.error(f"Other error occurred: {err}") - return None - else: - logger.info("The simulation was sent successfully to MVS API.") - return json.loads(response.text) - - -def mvs_simulation_check_status(token): - try: - response = requests.get(MVS_GET_URL + token, proxies=PROXY_CONFIG, verify=False) - response.raise_for_status() - except requests.HTTPError as http_err: - logger.error(f"HTTP error occurred: {http_err}") - return None - except Exception as err: - logger.error(f"Other error occurred: {err}") - return None - else: - logger.info("Success!") - return json.loads(response.text) - - -def mvs_sa_check_status(token): - try: - response = requests.get(MVS_SA_GET_URL + token, proxies=PROXY_CONFIG, verify=False) - response.raise_for_status() - except requests.HTTPError as http_err: - logger.error(f"HTTP error occurred: {http_err}") - return None - except Exception as err: - logger.error(f"Other error occurred: {err}") - return None - else: - logger.info("Success!") - return json.loads(response.text) - - -def fetch_mvs_simulation_results(simulation): - if simulation.status == PENDING: - response = 
mvs_simulation_check_status(token=simulation.mvs_token) - try: - simulation.status = response["status"] - simulation.errors = json.dumps(response["results"][ERROR]) if simulation.status == ERROR else None - simulation.results = ( - parse_mvs_results(simulation, response["results"]) if simulation.status == DONE else None - ) - simulation.mvs_version = response["mvs_version"] - logger.info(f"The simulation {simulation.id} is finished") - except: - simulation.status = ERROR - simulation.results = None - - simulation.elapsed_seconds = (datetime.now() - simulation.start_date).seconds - simulation.end_date = datetime.now() if response["status"] in [ERROR, DONE] else None - simulation.save() - - return simulation.status != PENDING - - -def fetch_mvs_sa_results(simulation): - if simulation.status == PENDING: - response = mvs_sa_check_status(token=simulation.mvs_token) - - simulation.parse_server_response(response) - - if simulation.status == DONE: - logger.info(f"The simulation {simulation.id} is finished") - - return simulation.status != PENDING - - -def parse_mvs_results(simulation, response_results): - data = json.loads(response_results) - asset_key_list = [ - "energy_consumption", - "energy_conversion", - "energy_production", - "energy_providers", - "energy_storage", - ] - - if not set(asset_key_list).issubset(data.keys()): - raise KeyError("There are missing keys from the received dictionary.") - - # Write Scalar KPIs to db - qs = KPIScalarResults.objects.filter(simulation=simulation) - if qs.exists(): - kpi_scalar = qs.first() - kpi_scalar.scalar_values = json.dumps(data["kpi"]["scalars"]) - kpi_scalar.save() - else: - KPIScalarResults.objects.create(scalar_values=json.dumps(data["kpi"]["scalars"]), simulation=simulation) - # Write Cost Matrix KPIs to db - qs = KPICostsMatrixResults.objects.filter(simulation=simulation) - if qs.exists(): - kpi_costs = qs.first() - kpi_costs.cost_values = json.dumps(data["kpi"]["cost_matrix"]) - kpi_costs.save() - else: - KPICostsMatrixResults.objects.create(cost_values=json.dumps(data["kpi"]["cost_matrix"]), simulation=simulation) - # Write Assets to db - data_subdict = {category: v for category, v in data.items() if category in asset_key_list} - qs = AssetsResults.objects.filter(simulation=simulation) - if qs.exists(): - asset_results = qs.first() - asset_results.asset_list = json.dumps(data_subdict) - asset_results.save() - else: - AssetsResults.objects.create(assets_list=json.dumps(data_subdict), simulation=simulation) - - qs = FancyResults.objects.filter(simulation=simulation) - if qs.exists(): - raise ValueError("Already existing FancyResults") - else: - # TODO add safety here with json schema - # Raw results is a panda dataframe which was saved to json using "split" - if "raw_results" in data: - results = data["raw_results"] - js = json.loads(results) - js_data = np.array(js["data"]) - - hdrs = [ - "bus", - "energy_vector", - "direction", - "asset", - "asset_type", - "oemof_type", - "flow_data", - "optimized_capacity", - ] - - # each columns already contains the values of the hdrs except for flow_data and optimized_capacity - # we append those values here - for i, col in enumerate(js["columns"]): - col.append(js_data[:-1, i].tolist()) - col.append(js_data[-1, i]) - - kwargs = {hdr: item for hdr, item in zip(hdrs, col)} - kwargs["simulation"] = simulation - fr = FancyResults(**kwargs) - fr.save() - - return response_results - - -def mvs_sensitivity_analysis_request(data: dict): - headers = {"content-type": "application/json"} - payload = 
json.dumps(data) - - try: - response = requests.post( - MVS_SA_POST_URL, - data=payload, - headers=headers, - proxies=PROXY_CONFIG, - verify=False, - ) - - # If the response was successful, no Exception will be raised - response.raise_for_status() - except requests.HTTPError as http_err: - logger.error(f"HTTP error occurred: {http_err}") - return None - except Exception as err: - logger.error(f"Other error occurred: {err}") - return None - else: - logger.info("The simulation was sent successfully to MVS API.") - return json.loads(response.text) +from datetime import datetime +import httpx as requests +import json +import numpy as np + +# from requests.exceptions import HTTPError +from epa.settings import ( + PROXY_CONFIG, + MVS_POST_URL, + MVS_GET_URL, + MVS_SA_POST_URL, + MVS_SA_GET_URL, + EXCHANGE_RATES_URL, + KOBO_API_TOKEN, + KOBO_API_URL, +) +from dashboard.models import ( + FancyResults, + AssetsResults, + KPICostsMatrixResults, + KPIScalarResults, + FlowResults, +) +from projects.constants import DONE, PENDING, ERROR +import logging + +logger = logging.getLogger(__name__) + + +class KoboToolbox: + base_survey_id = "aEpaGRFXWbaDyLSXTiQTZQ" + request_headers = {"Accept": "application/json", "Authorization": "Token " + KOBO_API_TOKEN} + + def __init__(self, project_id=None): + """When the class is initialized, a survey is cloned from the base survey, deployed and the permissions + are changed so that anonymous users can submit to the form. The web form url is returned when the form is + deployed""" + # TODO save these somewhere (maybe in Options) so that the survey stays assigned to the project + # TODO set up multiple scenarios for one project so that one community can all have the same survey across multiple scenarios + # TODO only create a new survey if this project doesn't already have a survey assigned to it + # project = Project.objects.get(pk=project_id) + # if project.options.kobo_survey is None: + self.project_survey_id = self.clone_form() + self.assign_permissions("add_submissions", "AnonymousUser") + self.assign_permissions("view_asset", "AnonymousUser") + self.project_survey_url = self.deploy_form() + + def request_data(self, form_id): + pass + + def clone_form(self, form_id=None): + """Clones a KoboToolbox form. If no form is given, the base form given in the class will be cloned + (corresponds to the basic IWI household questions survey). Returns the id of the newly created survey""" + if form_id is None: + form_id = self.base_survey_id + + payload = {"clone_from": form_id, "name": "API_test", "asset_type": "survey"} + + try: + response = requests.post(KOBO_API_URL + "assets/", data=payload, headers=self.request_headers, timeout=10) + # If the response was successful, no Exception will be raised + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + new_form_id = json.loads(response.text)["uid"] + logger.info(f"Cloned household survey to survey with id {new_form_id}.") + return new_form_id + pass + + def deploy_form(self, form_id=None): + """This call deploys the form. Form_id should be the id returned by clone_form. When the form is cloned, + it is initially saved as a draft before being deployed. 
Returns the enketo url needed to fill + out the survey""" + + if form_id is None: + form_id = self.project_survey_id + + # this parameter makes sure that the form is deployed as active (otherwise it will default to archived) + payload = {"active": True} + + try: + response = requests.post( + KOBO_API_URL + f"assets/{form_id}/deployment/", data=payload, headers=self.request_headers, timeout=10 + ) + + # If the response was successful, no Exception will be raised + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + enketo_url = json.loads(response.text)["asset"]["deployment__links"]["offline_url"] + logger.info(f"Successfully deployed survey with id {form_id}. Survey available at {enketo_url}.") + return enketo_url + + def assign_permissions(self, permission_codename, username, form_id=None): + """Assigns user permissions on a given form. For permissions without a KoboToolbox account, username should + be 'AnonymousUser'. The basic permissions needed to anonymously submit to the form are view_asset and + add_submissions""" + if form_id is None: + form_id = self.project_survey_id + + permission_list = [ + "change_asset", + "view_asset", + "manage_asset", + "delete_asset", + "change_submissions", + "delete_submissions", + "validate_submissions", + "add_submissions", + "view_submissions", + ] + + if permission_codename not in permission_list: + logger.warning(f"Permission doesn't exist. Available permission codenames are: '{permission_list}'") + return None + + payload = { + "permission": f"https://kf.kobotoolbox.org/api/v2/permissions/{permission_codename}/", + "user": f"https://kf.kobotoolbox.org/api/v2/users/{username}/", + } + + try: + response = requests.post( + KOBO_API_URL + f"assets/{form_id}/permission-assignments/", + data=payload, + headers=self.request_headers, + timeout=5, + ) + + # If the response was successful, no Exception will be raised + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + logger.info(f"Successfully assigned permission '{permission_codename}' to {form_id}. ") + return None + + +def request_exchange_rate(currency): + try: + response = requests.get(EXCHANGE_RATES_URL) + response.raise_for_status() + + except requests.HTTPError as http_err: + logger.info("Current exchange rate could not be fetched. 
Setting default value.") + exchange_rate = 774 + else: + data = response.json() + exchange_rate = round(data["conversion_rates"][currency], 2) + + return exchange_rate + + +def mvs_simulation_request(data: dict): + headers = {"content-type": "application/json"} + payload = json.dumps(data) + + try: + response = requests.post( + MVS_POST_URL, + data=payload, + headers=headers, + proxies=PROXY_CONFIG, + verify=False, + ) + + # If the response was successful, no Exception will be raised + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + logger.info("The simulation was sent successfully to MVS API.") + return json.loads(response.text) + + +def mvs_simulation_check_status(token): + try: + response = requests.get(MVS_GET_URL + token, proxies=PROXY_CONFIG, verify=False) + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + logger.info("Success!") + return json.loads(response.text) + + +def mvs_sa_check_status(token): + try: + response = requests.get(MVS_SA_GET_URL + token, proxies=PROXY_CONFIG, verify=False) + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + logger.info("Success!") + return json.loads(response.text) + + +def fetch_mvs_simulation_results(simulation): + if simulation.status == PENDING: + response = mvs_simulation_check_status(token=simulation.mvs_token) + try: + simulation.status = response["status"] + simulation.errors = json.dumps(response["results"][ERROR]) if simulation.status == ERROR else None + simulation.results = ( + parse_mvs_results(simulation, response["results"]) if simulation.status == DONE else None + ) + simulation.mvs_version = response["mvs_version"] + logger.info(f"The simulation {simulation.id} is finished") + except: + simulation.status = ERROR + simulation.results = None + + simulation.elapsed_seconds = (datetime.now() - simulation.start_date).seconds + simulation.end_date = datetime.now() if response["status"] in [ERROR, DONE] else None + simulation.save() + + return simulation.status != PENDING + + +def fetch_mvs_sa_results(simulation): + if simulation.status == PENDING: + response = mvs_sa_check_status(token=simulation.mvs_token) + + simulation.parse_server_response(response) + + if simulation.status == DONE: + logger.info(f"The simulation {simulation.id} is finished") + + return simulation.status != PENDING + + +def parse_mvs_results(simulation, response_results): + data = json.loads(response_results) + asset_key_list = [ + "energy_consumption", + "energy_conversion", + "energy_production", + "energy_providers", + "energy_storage", + ] + + if not set(asset_key_list).issubset(data.keys()): + raise KeyError("There are missing keys from the received dictionary.") + + # Write Scalar KPIs to db + qs = KPIScalarResults.objects.filter(simulation=simulation) + if qs.exists(): + kpi_scalar = qs.first() + kpi_scalar.scalar_values = json.dumps(data["kpi"]["scalars"]) + kpi_scalar.save() + else: + KPIScalarResults.objects.create(scalar_values=json.dumps(data["kpi"]["scalars"]), simulation=simulation) + # Write Cost Matrix KPIs to db 
+ qs = KPICostsMatrixResults.objects.filter(simulation=simulation) + if qs.exists(): + kpi_costs = qs.first() + kpi_costs.cost_values = json.dumps(data["kpi"]["cost_matrix"]) + kpi_costs.save() + else: + KPICostsMatrixResults.objects.create(cost_values=json.dumps(data["kpi"]["cost_matrix"]), simulation=simulation) + # Write Assets to db + data_subdict = {category: v for category, v in data.items() if category in asset_key_list} + qs = AssetsResults.objects.filter(simulation=simulation) + if qs.exists(): + asset_results = qs.first() + asset_results.asset_list = json.dumps(data_subdict) + asset_results.save() + else: + AssetsResults.objects.create(assets_list=json.dumps(data_subdict), simulation=simulation) + + qs = FancyResults.objects.filter(simulation=simulation) + if qs.exists(): + raise ValueError("Already existing FancyResults") + else: + # TODO add safety here with json schema + # Raw results is a panda dataframe which was saved to json using "split" + if "raw_results" in data: + results = data["raw_results"] + js = json.loads(results) + js_data = np.array(js["data"]) + + hdrs = [ + "bus", + "energy_vector", + "direction", + "asset", + "asset_type", + "oemof_type", + "flow_data", + "optimized_capacity", + ] + + # each columns already contains the values of the hdrs except for flow_data and optimized_capacity + # we append those values here + for i, col in enumerate(js["columns"]): + col.append(js_data[:-1, i].tolist()) + col.append(js_data[-1, i]) + + kwargs = {hdr: item for hdr, item in zip(hdrs, col)} + kwargs["simulation"] = simulation + fr = FancyResults(**kwargs) + fr.save() + + return response_results + + +def mvs_sensitivity_analysis_request(data: dict): + headers = {"content-type": "application/json"} + payload = json.dumps(data) + + try: + response = requests.post( + MVS_SA_POST_URL, + data=payload, + headers=headers, + proxies=PROXY_CONFIG, + verify=False, + ) + + # If the response was successful, no Exception will be raised + response.raise_for_status() + except requests.HTTPError as http_err: + logger.error(f"HTTP error occurred: {http_err}") + return None + except Exception as err: + logger.error(f"Other error occurred: {err}") + return None + else: + logger.info("The simulation was sent successfully to MVS API.") + return json.loads(response.text) diff --git a/app/templates/cp_nigeria/kobo_testing.html b/app/templates/cp_nigeria/kobo_testing.html new file mode 100644 index 00000000..40af7125 --- /dev/null +++ b/app/templates/cp_nigeria/kobo_testing.html @@ -0,0 +1,39 @@ +{% load i18n %} +{% load crispy_forms_tags %} + +