From a05e89309bb29400427bbcdcb502553571d40a9e Mon Sep 17 00:00:00 2001
From: Allan Stockman Rugano <kuristockyrugano@gmail.com>
Date: Wed, 2 Feb 2022 09:33:15 +0000
Subject: [PATCH 01/24] Syntax upgrade to Python 3.9 and Django 3.2. Without
 migrations|snapshots|tests

---
 backend/gunicorn_config.py | 2 +-
 backend/hct_mis_api/apps/account/admin.py | 289 ++++++++----
 backend/hct_mis_api/apps/account/models.py | 91 +++-
 .../hct_mis_api/apps/account/permissions.py | 61 ++-
 backend/hct_mis_api/apps/account/schema.py | 46 +-
 .../hct_mis_api/apps/activity_log/models.py | 30 +-
 .../apps/cash_assist_datahub/models.py | 2 +-
 backend/hct_mis_api/apps/core/admin.py | 213 +++++++--
 .../hct_mis_api/apps/core/base_test_case.py | 15 +-
 backend/hct_mis_api/apps/core/currencies.py | 3 +-
 backend/hct_mis_api/apps/core/datamart/api.py | 41 +-
 backend/hct_mis_api/apps/core/es_analyzers.py | 11 +-
 .../apps/core/exchange_rates/models.py | 36 +-
 .../hct_mis_api/apps/core/export_locations.py | 2 +-
 .../apps/core/flex_fields_importer.py | 124 ++++--
 backend/hct_mis_api/apps/core/kobo/common.py | 21 +-
 .../hct_mis_api/apps/core/mis_test_runner.py | 39 +-
 backend/hct_mis_api/apps/core/models.py | 111 +++--
 backend/hct_mis_api/apps/core/utils.py | 90 +++-
 backend/hct_mis_api/apps/geo/models.py | 17 +-
 backend/hct_mis_api/apps/grievance/models.py | 217 ++++++---
 .../hct_mis_api/apps/grievance/mutations.py | 323 ++++++++++----
 .../apps/grievance/mutations_extras/utils.py | 233 +++++++---
 backend/hct_mis_api/apps/grievance/schema.py | 251 ++++++++---
 backend/hct_mis_api/apps/household/admin.py | 194 ++++++--
 backend/hct_mis_api/apps/household/const.py | 4 +-
 backend/hct_mis_api/apps/household/forms.py | 14 +-
 backend/hct_mis_api/apps/household/models.py | 306 +++++++++----
 .../hct_mis_api/apps/mis_datahub/models.py | 16 +-
 backend/hct_mis_api/apps/payment/models.py | 39 +-
 backend/hct_mis_api/apps/payment/utils.py | 57 ++-
 .../hct_mis_api/apps/power_query/models.py | 29 +-
 backend/hct_mis_api/apps/program/models.py | 61 ++-
 .../apps/registration_data/models.py | 10 +-
 .../apps/registration_datahub/models.py | 118 +++--
 .../template_generator.py | 25 +-
 .../apps/registration_datahub/validators.py | 419 ++++++++++++++----
 .../generate_dashboard_report_service.py | 250 ++++++++---
 backend/hct_mis_api/apps/reporting/models.py | 66 ++-
 .../apps/sanction_list/tasks/load_xml.py | 156 +++++--
 backend/hct_mis_api/apps/steficon/admin.py | 120 +++--
 backend/hct_mis_api/apps/steficon/models.py | 46 +-
 backend/hct_mis_api/apps/steficon/result.py | 2 +-
 .../apps/steficon/templatetags/engine.py | 29 +-
 backend/hct_mis_api/apps/targeting/models.py | 199 +++++++--
 backend/hct_mis_api/apps/utils/admin.py | 6 +-
 backend/hct_mis_api/settings/__init__.py | 2 -
 backend/hct_mis_api/settings/base.py | 81 +++-
 backend/hct_mis_api/settings/dev.py | 6 +-
 backend/hct_mis_api/settings/staging.py | 22 +-
 backend/hct_mis_api/settings/test.py | 40 +-
 backend/hct_mis_api/urls.py | 22 +-
 deployment/kobo/files/enketo/create_config.py | 69 +--
 fabfile.py | 7 +-
 get_version.py | 2 +-
 update_version.py | 7 +-
 56 files changed, 3527 insertions(+), 1165 deletions(-)

diff --git a/backend/gunicorn_config.py b/backend/gunicorn_config.py index 78f009c8fc..57ac122df0 100644 --- a/backend/gunicorn_config.py +++ b/backend/gunicorn_config.py @@ -48,7 +48,7 @@ def worker_int(worker): import threading import traceback - id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) + id2name = {th.ident: th.name for th in threading.enumerate()} code = [] for 
threadId, stack in sys._current_frames().items(): code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId)) diff --git a/backend/hct_mis_api/apps/account/admin.py b/backend/hct_mis_api/apps/account/admin.py index 3b5bf707da..6735b12879 100644 --- a/backend/hct_mis_api/apps/account/admin.py +++ b/backend/hct_mis_api/apps/account/admin.py @@ -3,7 +3,6 @@ import re from collections import defaultdict, namedtuple from functools import cached_property -from import_export.widgets import ForeignKeyWidget, ManyToManyWidget from urllib.parse import unquote from django import forms @@ -18,11 +17,10 @@ from django.contrib.auth.admin import UserAdmin as BaseUserAdmin from django.contrib.auth.forms import UserCreationForm, UsernameField from django.contrib.auth.models import Group, Permission -from django.contrib.postgres.fields import JSONField from django.core.exceptions import ValidationError from django.core.mail import send_mail from django.db import router, transaction -from django.db.models import Q +from django.db.models import JSONField, Q from django.db.transaction import atomic from django.forms import EmailField, ModelChoiceField, MultipleChoiceField from django.forms.models import BaseInlineFormSet, ModelForm @@ -40,8 +38,9 @@ from adminfilters.autocomplete import AutoCompleteFilter from adminfilters.filters import AllValuesComboFilter from constance import config -from import_export import resources, fields +from import_export import fields, resources from import_export.admin import ImportExportModelAdmin +from import_export.widgets import ForeignKeyWidget, ManyToManyWidget from jsoneditor.forms import JSONEditor from requests import HTTPError from smart_admin.decorators import smart_register @@ -87,7 +86,9 @@ def clean(self): user = self.cleaned_data["user"] business_area = self.cleaned_data["business_area"] - account_models.IncompatibleRoles.objects.validate_user_role(user, business_area, role) + account_models.IncompatibleRoles.objects.validate_user_role( + user, business_area, role + ) class UserRoleInlineFormSet(BaseInlineFormSet): @@ -110,22 +111,28 @@ def clean(self): business_area = form.cleaned_data["business_area"] role = form.cleaned_data["role"] incompatible_roles = list( - account_models.IncompatibleRoles.objects.filter(role_one=role).values_list("role_two", flat=True) + account_models.IncompatibleRoles.objects.filter( + role_one=role + ).values_list("role_two", flat=True) ) + list( - account_models.IncompatibleRoles.objects.filter(role_two=role).values_list("role_one", flat=True) + account_models.IncompatibleRoles.objects.filter( + role_two=role + ).values_list("role_one", flat=True) ) error_forms = [ form_two.cleaned_data["role"].name for form_two in self.forms if form_two.cleaned_data - and not form_two.cleaned_data.get("DELETE") - and form_two.cleaned_data["business_area"] == business_area - and form_two.cleaned_data["role"].id in incompatible_roles + and not form_two.cleaned_data.get("DELETE") + and form_two.cleaned_data["business_area"] == business_area + and form_two.cleaned_data["role"].id in incompatible_roles ] if error_forms: if "role" not in form._errors: form._errors["role"] = ErrorList() - form._errors["role"].append(_(f"{role.name} is incompatible with {', '.join(error_forms)}.")) + form._errors["role"].append( + _(f"{role.name} is incompatible with {', '.join(error_forms)}.") + ) class UserRoleInline(admin.TabularInline): @@ -148,11 +155,13 @@ class UserRoleInline(admin.TabularInline): def get_valid_kobo_username(user: User): - return 
user.username.replace("@", "_at_").replace(".", "_").replace("+", "_").lower() + return ( + user.username.replace("@", "_at_").replace(".", "_").replace("+", "_").lower() + ) class DjAdminManager: - regex = re.compile('class="errorlist"><li>(.*)(?=<\/li>)') + regex = re.compile(r'class="errorlist"><li>(.*)(?=<\/li>)') class ResponseException(Exception): pass @@ -183,7 +192,11 @@ def assert_response(self, status: [int], location: str = None, custom_error=""): self._last_error = self._last_response raise self.ResponseException(msg) - if location and (redir_to := self._last_response.headers.get("location", "N/A")) != location: + if ( + location + and (redir_to := self._last_response.headers.get("location", "N/A")) + != location + ): msg = f"Unexpected redirect:{redir_to} <> {location}: {custom_error}" self._last_error = self._last_response raise self.ResponseException(msg) @@ -257,8 +270,7 @@ def list_users(self, q=""): if matches[0] == last_match: break last_match = matches[0] - for m in matches: - yield m + yield from matches page += 1 @@ -272,10 +284,16 @@ def get_csrfmiddlewaretoken(self): def delete_user(self, username, pk): self.login() - for url in [f"{self.admin_url_kc}auth/user/{pk}/delete/", f"{self.admin_url}auth/user/{pk}/delete/"]: + for url in [ + f"{self.admin_url_kc}auth/user/{pk}/delete/", + f"{self.admin_url}auth/user/{pk}/delete/", + ]: self._get(url) self.assert_response([200, 404, 302], custom_error=url) - if self._last_response.status_code == 302 and "/login/" in self._last_response.headers["Location"]: + if ( + self._last_response.status_code == 302 + and "/login/" in self._last_response.headers["Location"] + ): raise Exception(f"Cannot access to {url}") if self._last_response.status_code == 200: @@ -298,7 +316,9 @@ def queryset(self, request, queryset): Q(custom_fields__kobo_pk__isnull=True) | Q(custom_fields__kobo_pk=None), ) elif self.value() == "1": - return queryset.filter(custom_fields__kobo_pk__isnull=False).exclude(custom_fields__kobo_pk=None) + return queryset.filter(custom_fields__kobo_pk__isnull=False).exclude( + custom_fields__kobo_pk=None + ) return queryset @@ -308,7 +328,11 @@ class BusinessAreaFilter(SimpleListFilter): template = "adminfilters/combobox.html" def lookups(self, request, model_admin): - return BusinessArea.objects.filter(user_roles__isnull=False).values_list("id", "name").distinct() + return ( + BusinessArea.objects.filter(user_roles__isnull=False) + .values_list("id", "name") + .distinct() + ) def queryset(self, request, queryset): if self.value(): @@ -433,7 +457,9 @@ def kobo_user(self, obj): return obj.custom_fields.get("kobo_username") def get_deleted_objects(self, objs, request): - to_delete, model_count, perms_needed, protected = super().get_deleted_objects(objs, request) + to_delete, model_count, perms_needed, protected = super().get_deleted_objects( + objs, request + ) user = objs[0] kobo_pk = user.custom_fields.get("kobo_pk", None) kobo_username = user.custom_fields.get("kobo_username", None) @@ -453,7 +479,11 @@ def delete_view(self, request, object_id, extra_context=None): api = DjAdminManager() api.login(request) extra_context["kobo_pk"] = kobo_pk - self.message_user(request, "This action will also delete linked Kobo account", messages.WARNING) + self.message_user( + request, + "This action will also delete linked Kobo account", + messages.WARNING, + ) except Exception as e: extra_context["kobo_failed"] = True self.message_user(request, str(e), messages.ERROR) @@ -466,7 +496,9 @@ def delete_model(self, request, obj): if 
"kobo_username" in obj.custom_fields: api = DjAdminManager() api.login(request) - api.delete_user(obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"]) + api.delete_user( + obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"] + ) super().delete_model(request, obj) except Exception as e: logger.exception(e) @@ -484,7 +516,9 @@ def privileges(self, request, pk): for role in user.user_roles.all(): ba_roles[role.business_area.slug].append(role.role) - for role in user.user_roles.values_list("business_area__slug", flat=True).distinct("business_area"): + for role in user.user_roles.values_list( + "business_area__slug", flat=True + ).distinct("business_area"): ba_perms[role].extend(user.permissions_in_business_area(role)) context["business_ares_permissions"] = dict(ba_perms) @@ -492,7 +526,7 @@ def privileges(self, request, pk): return TemplateResponse(request, "admin/account/user/privileges.html", context) def get_actions(self, request): - actions = super(UserAdmin, self).get_actions(request) + actions = super().get_actions(request) if not request.user.has_perm("account.can_create_kobo_user"): if "create_kobo_user_qs" in actions: del actions["create_kobo_user_qs"] @@ -516,21 +550,31 @@ def add_business_area_role(self, request, queryset): for role in roles: if crud == "ADD": try: - IncompatibleRoles.objects.validate_user_role(u, ba, role) - ur, is_new = u.user_roles.get_or_create(business_area=ba, role=role) + IncompatibleRoles.objects.validate_user_role( + u, ba, role + ) + ur, is_new = u.user_roles.get_or_create( + business_area=ba, role=role + ) if is_new: added += 1 self.log_addition(request, ur, "Role added") except ValidationError as e: self.message_user(request, str(e), messages.ERROR) elif crud == "REMOVE": - to_delete = u.user_roles.filter(business_area=ba, role=role).first() + to_delete = u.user_roles.filter( + business_area=ba, role=role + ).first() if to_delete: removed += 1 - self.log_deletion(request, to_delete, str(to_delete)) + self.log_deletion( + request, to_delete, str(to_delete) + ) to_delete.delete() else: - raise ValueError("Bug found. {} not valid operation for add/rem role") + raise ValueError( + "Bug found. 
{} not valid operation for add/rem role" + ) if removed: msg = f"{removed} roles removed from {users} users" elif added: @@ -543,12 +587,14 @@ def add_business_area_role(self, request, queryset): else: ctx = self.get_common_context(request, title="Add Role", selection=queryset) ctx["form"] = AddRoleForm() - return render(request, "admin/account/user/business_area_role.html", context=ctx) + return render( + request, "admin/account/user/business_area_role.html", context=ctx + ) add_business_area_role.short_description = "Add/Remove Business Area roles" def _grant_kobo_accesss_to_user(self, user, notify=True, sync=True): - password = get_random_string() + password = get_random_string(length=12) url = f"{settings.KOBO_KF_URL}/authorized_application/users/" username = get_valid_kobo_username(user) res = requests.post( @@ -578,7 +624,9 @@ def _grant_kobo_accesss_to_user(self, user, notify=True, sync=True): if res.status_code == 201 and notify: send_mail( "Kobo credentials", - KOBO_ACCESS_EMAIL.format(email=user.email, password=password, kobo_url=settings.KOBO_KF_URL), + KOBO_ACCESS_EMAIL.format( + email=user.email, password=password, kobo_url=settings.KOBO_KF_URL + ), settings.DEFAULT_FROM_EMAIL, [user.email], ) @@ -591,35 +639,60 @@ def create_kobo_user_qs(self, request, queryset): self._grant_kobo_accesss_to_user(request, user) except Exception as e: logger.exception(e) - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) - self.message_user(request, f"User successfully `{user.username}` created on Kobo", messages.SUCCESS) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) + self.message_user( + request, + f"User successfully `{user.username}` created on Kobo", + messages.SUCCESS, + ) - @button(permission="account.can_create_kobo_user", visible=lambda o, r: not o.custom_fields.get("kobo_username")) + @button( + permission="account.can_create_kobo_user", + visible=lambda o, r: not o.custom_fields.get("kobo_username"), + ) def create_kobo_user(self, request, pk): try: self._grant_kobo_accesss_to_user(self.get_queryset(request).get(pk=pk)) - self.message_user(request, f"Granted access to {settings.KOBO_KF_URL}", messages.SUCCESS) + self.message_user( + request, f"Granted access to {settings.KOBO_KF_URL}", messages.SUCCESS + ) except Exception as e: logger.exception(e) - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) - @button(permission="account.can_create_kobo_user", visible=lambda o, r: o.custom_fields.get("kobo_username")) + @button( + permission="account.can_create_kobo_user", + visible=lambda o, r: o.custom_fields.get("kobo_username"), + ) def remove_kobo_access(self, request, pk): try: obj = self.get_object(request, pk) api = DjAdminManager() - api.delete_user(obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"]) + api.delete_user( + obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"] + ) obj.custom_fields["kobo_username"] = None obj.custom_fields["kobo_pk"] = None obj.save() - self.message_user(request, f"Kobo Access removed from {settings.KOBO_KF_URL}", messages.WARNING) + self.message_user( + request, + f"Kobo Access removed from {settings.KOBO_KF_URL}", + messages.WARNING, + ) except Exception as e: logger.exception(e) - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", 
messages.ERROR + ) @button(label="Import CSV", permission="account.can_upload_to_kobo") def import_csv(self, request): from django.contrib.admin.helpers import AdminForm + context = self.get_common_context(request, processed=False) if request.method == "GET": form = ImportCSVForm(initial={"partner": Partner.objects.first()}) @@ -636,7 +709,10 @@ def import_csv(self, request): role = form.cleaned_data["role"] if csv_file.multiple_chunks(): - raise Exception("Uploaded file is too big (%.2f MB)" % (csv_file.size(1000 * 1000))) + raise Exception( + "Uploaded file is too big (%.2f MB)" + % (csv_file.size(1000 * 1000)) + ) data_set = csv_file.read().decode("utf-8-sig").splitlines() reader = csv.DictReader( data_set, @@ -654,27 +730,55 @@ def import_csv(self, request): try: email = row["email"].strip() except Exception as e: - raise Exception(f"{e.__class__.__name__}: {e} on `{row}`") + raise Exception( + f"{e.__class__.__name__}: {e} on `{row}`" + ) - user_info = {"email": email, "is_new": False, "kobo": False, "error": ""} + user_info = { + "email": email, + "is_new": False, + "kobo": False, + "error": "", + } if "username" in row: username = row["username"].strip() else: - username = row["email"].replace("@", "_").replace(".", "_").lower() + username = ( + row["email"] + .replace("@", "_") + .replace(".", "_") + .lower() + ) u, isnew = account_models.User.objects.get_or_create( - email=email, partner=partner, defaults={"username": username} + email=email, + partner=partner, + defaults={"username": username}, ) if isnew: - ur = u.user_roles.create(business_area=business_area, role=role) - self.log_addition(request, u, "User imported by CSV") + ur = u.user_roles.create( + business_area=business_area, role=role + ) + self.log_addition( + request, u, "User imported by CSV" + ) self.log_addition(request, ur, "User Role added") else: # check role validity try: - IncompatibleRoles.objects.validate_user_role(u, business_area, role) - u.user_roles.get_or_create(business_area=business_area, role=role) - self.log_addition(request, ur, "User Role added") + IncompatibleRoles.objects.validate_user_role( + u, business_area, role + ) + u.user_roles.get_or_create( + business_area=business_area, role=role + ) + self.log_addition( + request, ur, "User Role added" + ) except ValidationError as e: - self.message_user(request, f"Error on {u}: {e}", messages.ERROR) + self.message_user( + request, + f"Error on {u}: {e}", + messages.ERROR, + ) if enable_kobo: self._grant_kobo_accesss_to_user(u, sync=False) @@ -686,9 +790,13 @@ def import_csv(self, request): logger.exception(e) context["form"] = form context["errors"] = [str(e)] - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) else: - self.message_user(request, "Please correct errors below", messages.ERROR) + self.message_user( + request, "Please correct errors below", messages.ERROR + ) context["form"] = form fs = form._fieldsets or [(None, {"fields": form.base_fields})] context["adminform"] = AdminForm(form, fieldsets=fs, prepopulated_fields={}) @@ -709,7 +817,10 @@ def kobo_users_sync(self, request): email=entry[2], defaults={ "username": entry[1], - "custom_fields": {"kobo_pk": entry[0], "kobo_username": entry[1]}, + "custom_fields": { + "kobo_pk": entry[0], + "kobo_username": entry[1], + }, }, ) local.custom_fields["kobo_pk"] = entry[0] @@ -765,9 +876,17 @@ def sync_multi(self, request): except Http404: not_found.append(str(user)) if not_found: 
- self.message_user(request, f"These users were not found: {', '.join(not_found)}", messages.WARNING) + self.message_user( + request, + f"These users were not found: {', '.join(not_found)}", + messages.WARNING, + ) else: - self.message_user(request, "Active Directory data successfully fetched", messages.SUCCESS) + self.message_user( + request, + "Active Directory data successfully fetched", + messages.SUCCESS, + ) except Exception as e: logger.exception(e) self.message_user(request, str(e), messages.ERROR) @@ -776,7 +895,9 @@ def sync_multi(self, request): def sync_single(self, request, pk): try: self._sync_ad_data(self.get_object(request, pk)) - self.message_user(request, "Active Directory data successfully fetched", messages.SUCCESS) + self.message_user( + request, "Active Directory data successfully fetched", messages.SUCCESS + ) except Exception as e: logger.exception(e) self.message_user(request, str(e), messages.ERROR) @@ -805,7 +926,11 @@ def load_ad_users(self, request): business_area = form.cleaned_data["business_area"] users_to_bulk_create = [] users_role_to_bulk_create = [] - existing = set(account_models.User.objects.filter(email__in=emails).values_list("email", flat=True)) + existing = set( + account_models.User.objects.filter(email__in=emails).values_list( + "email", flat=True + ) + ) results = self.Results([], [], [], []) try: ms_graph = MicrosoftGraphAPI() @@ -817,7 +942,9 @@ def load_ad_users(self, request): results.updated.append(user) else: user_data = ms_graph.get_user_data(email=email) - user_args = build_arg_dict_from_dict(user_data, DJANGO_USER_MAP) + user_args = build_arg_dict_from_dict( + user_data, DJANGO_USER_MAP + ) user = account_models.User(**user_args) if user.first_name is None: user.first_name = "" @@ -828,18 +955,26 @@ def load_ad_users(self, request): user.job_title = job_title user.set_unusable_password() users_to_bulk_create.append(user) - global_business_area = BusinessArea.objects.filter(slug="global").first() - basic_role = account_models.Role.objects.filter(name="Basic User").first() + global_business_area = BusinessArea.objects.filter( + slug="global" + ).first() + basic_role = account_models.Role.objects.filter( + name="Basic User" + ).first() if global_business_area and basic_role: users_role_to_bulk_create.append( account_models.UserRole( - business_area=global_business_area, user=user, role=basic_role + business_area=global_business_area, + user=user, + role=basic_role, ) ) results.created.append(user) users_role_to_bulk_create.append( - account_models.UserRole(role=role, business_area=business_area, user=user) + account_models.UserRole( + role=role, business_area=business_area, user=user + ) ) except HTTPError as e: if e.response.status_code != 404: @@ -848,7 +983,9 @@ def load_ad_users(self, request): except Http404: results.missing.append(email) account_models.User.objects.bulk_create(users_to_bulk_create) - account_models.UserRole.objects.bulk_create(users_role_to_bulk_create, ignore_conflicts=True) + account_models.UserRole.objects.bulk_create( + users_role_to_bulk_create, ignore_conflicts=True + ) ctx["results"] = results return TemplateResponse(request, "admin/load_users.html", ctx) except Exception as e: @@ -877,7 +1014,7 @@ def queryset(self, request, queryset): class RoleResource(resources.ModelResource): class Meta: model = account_models.Role - fields = ('name', 'subsystem', 'permissions') + fields = ("name", "subsystem", "permissions") import_id_fields = ("name", "subsystem") @@ -900,7 +1037,7 @@ def matrix(self, request): ctx = 
self.get_common_context(request, action="Matrix") matrix1 = {} matrix2 = {} - perms = sorted([str(x.value) for x in Permissions]) + perms = sorted(str(x.value) for x in Permissions) roles = account_models.Role.objects.order_by("name").filter(subsystem="HOPE") for perm in perms: granted_to_roles = [] @@ -987,14 +1124,14 @@ class IncompatibleRolesAdmin(HOPEModelAdminBase): class GroupResource(resources.ModelResource): - permissions = fields.Field(widget=ManyToManyWidget(Permission, - field='codename'), - attribute='permissions') + permissions = fields.Field( + widget=ManyToManyWidget(Permission, field="codename"), attribute="permissions" + ) class Meta: model = Group - fields = ('name', 'permissions') - import_id_fields = ("name", ) + fields = ("name", "permissions") + import_id_fields = ("name",) @smart_register(Group) @@ -1009,7 +1146,11 @@ def import_fixture(self, request): return _import_fixture(self, request) def _perms(self, request, object_id) -> set: - return set(self.get_object(request, object_id).permissions.values_list("codename", flat=True)) + return set( + self.get_object(request, object_id).permissions.values_list( + "codename", flat=True + ) + ) @button() def users(self, request, pk): diff --git a/backend/hct_mis_api/apps/account/models.py b/backend/hct_mis_api/apps/account/models.py index c1484a6622..a9f16472c4 100644 --- a/backend/hct_mis_api/apps/account/models.py +++ b/backend/hct_mis_api/apps/account/models.py @@ -4,7 +4,7 @@ from django import forms from django.contrib.auth import get_user_model from django.contrib.auth.models import AbstractUser -from django.contrib.postgres.fields import ArrayField, CICharField, JSONField +from django.contrib.postgres.fields import ArrayField, CICharField from django.core.exceptions import ValidationError from django.core.validators import ( MaxLengthValidator, @@ -12,10 +12,11 @@ ProhibitNullCharactersValidator, ) from django.db import models +from django.db.models import JSONField from django.db.models.signals import post_save, pre_delete, pre_save from django.dispatch import receiver from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from model_utils import Choices from model_utils.models import UUIDModel @@ -54,9 +55,13 @@ def get_partners_as_choices(cls): class User(AbstractUser, UUIDModel): - status = models.CharField(choices=USER_STATUS_CHOICES, max_length=10, default=INVITED) + status = models.CharField( + choices=USER_STATUS_CHOICES, max_length=10, default=INVITED + ) # org = models.CharField(choices=USER_PARTNER_CHOICES, max_length=10, default=USER_PARTNER_CHOICES.UNICEF) - partner = models.ForeignKey(Partner, on_delete=models.PROTECT, null=True, blank=True) + partner = models.ForeignKey( + Partner, on_delete=models.PROTECT, null=True, blank=True + ) email = models.EmailField(_("email address"), blank=True, unique=True) available_for_export = models.BooleanField( default=True, help_text="Indicating if a User can be exported to CashAssist" @@ -64,7 +69,9 @@ class User(AbstractUser, UUIDModel): custom_fields = JSONField(default=dict, blank=True) job_title = models.CharField(max_length=255, blank=True) - ad_uuid = models.CharField(max_length=64, unique=True, null=True, blank=True, editable=False) + ad_uuid = models.CharField( + max_length=64, unique=True, null=True, blank=True, editable=False + ) # CashAssist DOAP fields last_modify_date = models.DateTimeField(auto_now=True, null=True, blank=True) @@ -72,7 +79,9 @@ class 
User(AbstractUser, UUIDModel): default=None, null=True, blank=True, help_text="Timestamp of last sync with CA" ) doap_hash = models.TextField( - editable=False, default="", help_text="System field used to check if changes need to be sent to CA" + editable=False, + default="", + help_text="System field used to check if changes need to be sent to CA", ) def __str__(self): @@ -89,17 +98,22 @@ def save(self, *args, **kwargs): def permissions_in_business_area(self, business_area_slug): all_roles_permissions_list = list( - Role.objects.filter(user_roles__user=self, user_roles__business_area__slug=business_area_slug).values_list( - "permissions", flat=True - ) + Role.objects.filter( + user_roles__user=self, + user_roles__business_area__slug=business_area_slug, + ).values_list("permissions", flat=True) ) return [ - permission for roles_permissions in all_roles_permissions_list for permission in roles_permissions or [] + permission + for roles_permissions in all_roles_permissions_list + for permission in roles_permissions or [] ] def has_permission(self, permission, business_area, write=False): query = Role.objects.filter( - permissions__contains=[permission], user_roles__user=self, user_roles__business_area=business_area + permissions__contains=[permission], + user_roles__user=self, + user_roles__business_area=business_area, ) return query.count() > 0 @@ -126,9 +140,15 @@ def formfield(self, **kwargs): class UserRole(TimeStampedUUIDModel): - business_area = models.ForeignKey("core.BusinessArea", related_name="user_roles", on_delete=models.CASCADE) - user = models.ForeignKey("account.User", related_name="user_roles", on_delete=models.CASCADE) - role = models.ForeignKey("account.Role", related_name="user_roles", on_delete=models.CASCADE) + business_area = models.ForeignKey( + "core.BusinessArea", related_name="user_roles", on_delete=models.CASCADE + ) + user = models.ForeignKey( + "account.User", related_name="user_roles", on_delete=models.CASCADE + ) + role = models.ForeignKey( + "account.Role", related_name="user_roles", on_delete=models.CASCADE + ) class Meta: unique_together = ("business_area", "user", "role") @@ -159,7 +179,9 @@ class Role(TimeStampedUUIDModel): ) subsystem = models.CharField(choices=SUBSYSTEMS, max_length=30, default=HOPE) permissions = ChoiceArrayField( - models.CharField(choices=Permissions.choices(), max_length=255), null=True, blank=True + models.CharField(choices=Permissions.choices(), max_length=255), + null=True, + blank=True, ) def clean(self): @@ -203,14 +225,22 @@ def post_save_user(sender, instance, created, *args, **kwargs): business_area = BusinessArea.objects.filter(slug="global").first() role = Role.objects.filter(name="Basic User").first() if business_area and role: - UserRole.objects.get_or_create(business_area=business_area, user=instance, role=role) + UserRole.objects.get_or_create( + business_area=business_area, user=instance, role=role + ) class IncompatibleRolesManager(models.Manager): def validate_user_role(self, user, business_area, role): incompatible_roles = list( - IncompatibleRoles.objects.filter(role_one=role).values_list("role_two", flat=True) - ) + list(IncompatibleRoles.objects.filter(role_two=role).values_list("role_one", flat=True)) + IncompatibleRoles.objects.filter(role_one=role).values_list( + "role_two", flat=True + ) + ) + list( + IncompatibleRoles.objects.filter(role_two=role).values_list( + "role_one", flat=True + ) + ) incompatible_userroles = UserRole.objects.filter( business_area=business_area, role__id__in=incompatible_roles, @@ 
-234,8 +264,12 @@ class IncompatibleRoles(TimeStampedUUIDModel): user cannot be assigned both of the roles in the same business area at the same time """ - role_one = models.ForeignKey("account.Role", related_name="incompatible_roles_one", on_delete=models.CASCADE) - role_two = models.ForeignKey("account.Role", related_name="incompatible_roles_two", on_delete=models.CASCADE) + role_one = models.ForeignKey( + "account.Role", related_name="incompatible_roles_one", on_delete=models.CASCADE + ) + role_two = models.ForeignKey( + "account.Role", related_name="incompatible_roles_two", on_delete=models.CASCADE + ) objects = IncompatibleRolesManager() @@ -254,10 +288,15 @@ def clean(self): raise ValidationError(_("Choose two different roles.")) failing_users = set() - for role_pair in [(self.role_one, self.role_two), (self.role_two, self.role_one)]: + for role_pair in [ + (self.role_one, self.role_two), + (self.role_two, self.role_one), + ]: for userrole in UserRole.objects.filter(role=role_pair[0]): if UserRole.objects.filter( - user=userrole.user, business_area=userrole.business_area, role=role_pair[1] + user=userrole.user, + business_area=userrole.business_area, + role=role_pair[1], ).exists(): failing_users.add(userrole.user.email) @@ -277,8 +316,12 @@ def validate_unique(self, *args, **kwargs): super().validate_unique(*args, **kwargs) # unique_together will take care of unique couples only if order is the same # since it doesn't matter if role is one or two, we need to check for reverse uniqueness as well - if IncompatibleRoles.objects.filter(role_one=self.role_two, role_two=self.role_one).exists(): + if IncompatibleRoles.objects.filter( + role_one=self.role_two, role_two=self.role_one + ).exists(): logger.error( f"This combination of roles ({self.role_one}, {self.role_two}) already exists as incompatible pair." 
) - raise ValidationError(_("This combination of roles already exists as incompatible pair.")) + raise ValidationError( + _("This combination of roles already exists as incompatible pair.") + ) diff --git a/backend/hct_mis_api/apps/account/permissions.py b/backend/hct_mis_api/apps/account/permissions.py index d7c7310725..e2752318f1 100644 --- a/backend/hct_mis_api/apps/account/permissions.py +++ b/backend/hct_mis_api/apps/account/permissions.py @@ -197,11 +197,14 @@ def has_permission(cls, info, **kwargs): else: if business_area_arg is None: return False - business_area = BusinessArea.objects.filter(slug=business_area_arg).first() + business_area = BusinessArea.objects.filter( + slug=business_area_arg + ).first() if business_area is None: return False - return info.context.user.is_authenticated and info.context.user.has_permission( - permission.name, business_area + return ( + info.context.user.is_authenticated + and info.context.user.has_permission(permission.name, business_area) ) return XDPerm @@ -218,7 +221,9 @@ def has_permission(cls, info, **kwargs): else: if business_area_arg is None: return False - business_area = BusinessArea.objects.filter(slug=business_area_arg).first() + business_area = BusinessArea.objects.filter( + slug=business_area_arg + ).first() if business_area is None: return False for permission in permissions: @@ -235,7 +240,10 @@ class BaseNodePermissionMixin: @classmethod def check_node_permission(cls, info, object_instance): business_area = object_instance.business_area - if not any((perm.has_permission(info, business_area=business_area) for perm in cls.permission_classes)): + if not any( + perm.has_permission(info, business_area=business_area) + for perm in cls.permission_classes + ): logger.error("Permission Denied") raise GraphQLError("Permission Denied") @@ -288,7 +296,7 @@ def __init__( self._extra_filter_meta = extra_filter_meta self._base_args = None self.permission_classes = permission_classes - super(DjangoPermissionFilterConnectionField, self).__init__(type, *args, **kwargs) + super().__init__(type, *args, **kwargs) @property def args(self): @@ -306,7 +314,9 @@ def filterset_class(self): if self._extra_filter_meta: meta.update(self._extra_filter_meta) - filterset_class = self._provided_filterset_class or (self.node_type._meta.filterset_class) + filterset_class = self._provided_filterset_class or ( + self.node_type._meta.filterset_class + ) self._filterset_class = get_filterset_class(filterset_class, **meta) return self._filterset_class @@ -316,16 +326,29 @@ def filtering_args(self): return get_filtering_args_from_filterset(self.filterset_class, self.node_type) @classmethod - def resolve_queryset(cls, connection, iterable, info, args, filtering_args, filterset_class, permission_classes): + def resolve_queryset( + cls, + connection, + iterable, + info, + args, + filtering_args, + filterset_class, + permission_classes, + ): filter_kwargs = {k: v for k, v in args.items() if k in filtering_args} - if not any((perm.has_permission(info, **filter_kwargs) for perm in permission_classes)): + if not any( + perm.has_permission(info, **filter_kwargs) for perm in permission_classes + ): logger.error("Permission Denied") raise GraphQLError("Permission Denied") if "permissions" in filtering_args: - filter_kwargs["permissions"] = info.context.user.permissions_in_business_area( + filter_kwargs[ + "permissions" + ] = info.context.user.permissions_in_business_area( filter_kwargs.get("business_area") ) - qs = super(DjangoPermissionFilterConnectionField, 
cls).resolve_queryset(connection, iterable, info, args) + qs = super().resolve_queryset(connection, iterable, info, args) return filterset_class(data=filter_kwargs, queryset=qs, request=info.context).qs def get_queryset_resolver(self): @@ -384,8 +407,16 @@ def has_creator_or_owner_permission( cls.is_authenticated(info) if not ( cls.has_permission(info, general_permission, business_area_arg, False) - or (is_creator and cls.has_permission(info, creator_permission, business_area_arg, False)) - or (is_owner and cls.has_permission(info, owner_permission, business_area_arg, False)) + or ( + is_creator + and cls.has_permission( + info, creator_permission, business_area_arg, False + ) + ) + or ( + is_owner + and cls.has_permission(info, owner_permission, business_area_arg, False) + ) ): return cls.raise_permission_denied_error(raise_error=raise_error) return True @@ -399,7 +430,9 @@ def raise_permission_denied_error(not_authenticated=False, raise_error=True): raise PermissionDenied("Permission Denied: User is not authenticated.") else: logger.error("Permission Denied: User does not have correct permission.") - raise PermissionDenied("Permission Denied: User does not have correct permission.") + raise PermissionDenied( + "Permission Denied: User does not have correct permission." + ) class PermissionMutation(BaseMutationPermissionMixin, Mutation): diff --git a/backend/hct_mis_api/apps/account/schema.py b/backend/hct_mis_api/apps/account/schema.py index 3520ac8ee6..eb4736af98 100644 --- a/backend/hct_mis_api/apps/account/schema.py +++ b/backend/hct_mis_api/apps/account/schema.py @@ -6,7 +6,7 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db.models import Q from django.db.models.functions import Lower -from django.utils.encoding import force_text +from django.utils.encoding import force_str from django.utils.functional import Promise import graphene @@ -49,8 +49,12 @@ class UsersFilter(FilterSet): business_area = CharFilter(required=True, method="business_area_filter") search = CharFilter(method="search_filter") status = MultipleChoiceFilter(field_name="status", choices=USER_STATUS_CHOICES) - partner = MultipleChoiceFilter(choices=Partner.get_partners_as_choices(), method="partners_filter") - roles = MultipleChoiceFilter(choices=Role.get_roles_as_choices(), method="roles_filter") + partner = MultipleChoiceFilter( + choices=Partner.get_partners_as_choices(), method="partners_filter" + ) + roles = MultipleChoiceFilter( + choices=Role.get_roles_as_choices(), method="roles_filter" + ) class Meta: model = get_user_model() @@ -62,7 +66,14 @@ class Meta: } order_by = CustomOrderingFilter( - fields=(Lower("first_name"), Lower("last_name"), "last_login", "status", "partner", "email") + fields=( + Lower("first_name"), + Lower("last_name"), + "last_login", + "status", + "partner", + "email", + ) ) def search_filter(self, qs, name, value): @@ -87,7 +98,10 @@ def roles_filter(self, qs, name, values): business_area_slug = self.data.get("business_area") q_obj = Q() for value in values: - q_obj |= Q(user_roles__role__id=value, user_roles__business_area__slug=business_area_slug) + q_obj |= Q( + user_roles__role__id=value, + user_roles__business_area__slug=business_area_slug, + ) return qs.filter(q_obj) @@ -107,7 +121,9 @@ class UserBusinessAreaNode(DjangoObjectType): permissions = graphene.List(graphene.String) def resolve_permissions(self, info): - user_roles = UserRole.objects.filter(user=info.context.user, business_area_id=self.id) + user_roles = UserRole.objects.filter( + 
user=info.context.user, business_area_id=self.id + ) return permissions_resolver(user_roles) class Meta: @@ -150,8 +166,8 @@ class Meta: class LazyEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Promise): - return force_text(obj) - return super(LazyEncoder, self).default(obj) + return force_str(obj) + return super().default(obj) class JSONLazyString(graphene.Scalar): @@ -182,14 +198,18 @@ class Query(graphene.ObjectType): UserNode, filterset_class=UsersFilter, permission_classes=( - hopeOneOfPermissionClass(Permissions.USER_MANAGEMENT_VIEW_LIST, *ALL_GRIEVANCES_CREATE_MODIFY), + hopeOneOfPermissionClass( + Permissions.USER_MANAGEMENT_VIEW_LIST, *ALL_GRIEVANCES_CREATE_MODIFY + ), ), ) # all_log_entries = graphene.ConnectionField(LogEntryObjectConnection, object_id=graphene.String(required=False)) user_roles_choices = graphene.List(ChoiceObject) user_status_choices = graphene.List(ChoiceObject) user_partner_choices = graphene.List(ChoiceObject) - has_available_users_to_export = graphene.Boolean(business_area_slug=graphene.String(required=True)) + has_available_users_to_export = graphene.Boolean( + business_area_slug=graphene.String(required=True) + ) # def resolve_all_log_entries(self, info, **kwargs): # object_id = kwargs.get('object_id') @@ -221,6 +241,10 @@ def resolve_has_available_users_to_export(self, info, business_area_slug, **kwar return ( get_user_model() .objects.prefetch_related("user_roles") - .filter(available_for_export=True, is_superuser=False, user_roles__business_area__slug=business_area_slug) + .filter( + available_for_export=True, + is_superuser=False, + user_roles__business_area__slug=business_area_slug, + ) .exists() ) diff --git a/backend/hct_mis_api/apps/activity_log/models.py b/backend/hct_mis_api/apps/activity_log/models.py index d04a330944..9b7dfa8db2 100644 --- a/backend/hct_mis_api/apps/activity_log/models.py +++ b/backend/hct_mis_api/apps/activity_log/models.py @@ -1,16 +1,19 @@ from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType -from django.contrib.postgres.fields import JSONField -from django.utils.translation import ugettext_lazy as _ from django.db import models +from django.db.models import JSONField +from django.utils.translation import gettext_lazy as _ + from model_utils.fields import UUIDField from hct_mis_api.apps.activity_log.utils import create_diff from hct_mis_api.apps.core.utils import nested_getattr -def log_create(mapping, business_area_field, user=None, old_object=None, new_object=None): +def log_create( + mapping, business_area_field, user=None, old_object=None, new_object=None +): if new_object: instance = new_object else: @@ -53,11 +56,20 @@ class LogEntry(models.Model): ) content_type = models.ForeignKey( - ContentType, on_delete=models.SET_NULL, null=True, related_name="log_entries", db_index=True + ContentType, + on_delete=models.SET_NULL, + null=True, + related_name="log_entries", + db_index=True, ) object_id = models.UUIDField(null=True, db_index=True) content_object = GenericForeignKey("content_type", "object_id") - action = models.CharField(choices=LOG_ENTRY_ACTION_CHOICES, max_length=100, verbose_name=_("action"), db_index=True) + action = models.CharField( + choices=LOG_ENTRY_ACTION_CHOICES, + max_length=100, + verbose_name=_("action"), + db_index=True, + ) object_repr = models.TextField(blank=True) changes = JSONField(null=True, verbose_name=_("change message")) user = models.ForeignKey( @@ -68,9 +80,13 
@@ class LogEntry(models.Model): related_name="logs", verbose_name=_("actor"), ) - business_area = models.ForeignKey("core.BusinessArea", on_delete=models.SET_NULL, null=True) + business_area = models.ForeignKey( + "core.BusinessArea", on_delete=models.SET_NULL, null=True + ) - timestamp = models.DateTimeField(auto_now_add=True, verbose_name=_("timestamp"), db_index=True) + timestamp = models.DateTimeField( + auto_now_add=True, verbose_name=_("timestamp"), db_index=True + ) class Meta: get_latest_by = "timestamp" diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/models.py b/backend/hct_mis_api/apps/cash_assist_datahub/models.py index 1aaa4d17cf..78f3302ba6 100644 --- a/backend/hct_mis_api/apps/cash_assist_datahub/models.py +++ b/backend/hct_mis_api/apps/cash_assist_datahub/models.py @@ -2,7 +2,7 @@ from django.core.validators import MinValueValidator from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from hct_mis_api.apps.payment.models import PaymentRecord as InternalPaymentRecord from hct_mis_api.apps.utils.models import AbstractSession diff --git a/backend/hct_mis_api/apps/core/admin.py b/backend/hct_mis_api/apps/core/admin.py index c5db92b054..900fb9b357 100644 --- a/backend/hct_mis_api/apps/core/admin.py +++ b/backend/hct_mis_api/apps/core/admin.py @@ -9,11 +9,11 @@ from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.messages import ERROR from django.contrib.postgres.aggregates import ArrayAgg -from django.contrib.postgres.fields import JSONField from django.core.exceptions import PermissionDenied, ValidationError from django.core.mail import EmailMessage from django.core.validators import RegexValidator from django.db import transaction +from django.db.models import JSONField from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404, redirect from django.template.defaultfilters import slugify @@ -72,7 +72,9 @@ class TestRapidproForm(forms.Form): label="Phone number", required=True, ) - flow_name = forms.CharField(label="Name of the test flow", initial="Test", required=True) + flow_name = forms.CharField( + label="Name of the test flow", initial="Test", required=True + ) class BusinessOfficeCodeValidator(RegexValidator): @@ -116,8 +118,11 @@ class GroupConcat(Aggregate): template = "%(function)s(%(distinct)s%(expressions)s)" def __init__(self, expression, distinct=False, **extra): - super(GroupConcat, self).__init__( - expression, distinct="DISTINCT " if distinct else "", output_field=CharField(), **extra + super().__init__( + expression, + distinct="DISTINCT " if distinct else "", + output_field=CharField(), + **extra, ) @@ -131,7 +136,12 @@ class BusinessAreaAdmin(ExtraUrlMixin, admin.ModelAdmin): "region_code", ) search_fields = ("name", "slug") - list_filter = ("has_data_sharing_agreement", "region_name", BusinessofficeFilter, "is_split") + list_filter = ( + "has_data_sharing_agreement", + "region_name", + BusinessofficeFilter, + "is_split", + ) readonly_fields = ("parent", "is_split") filter_horizontal = ("countries",) # formfield_overrides = { @@ -174,7 +184,7 @@ def split_business_area(self, request, pk): preserved_filters = self.get_preserved_filters(request) redirect_url = reverse( - "admin:%s_%s_change" % (opts.app_label, opts.model_name), + f"admin:{opts.app_label}_{opts.model_name}_change", args=(office.pk,), current_app=self.admin_site.name, ) @@ -189,7 +199,11 @@ def 
split_business_area(self, request, pk): def _get_doap_matrix(self, obj): matrix = [] - ca_roles = Role.objects.filter(subsystem=Role.CA).order_by("name").values_list("name", flat=True) + ca_roles = ( + Role.objects.filter(subsystem=Role.CA) + .order_by("name") + .values_list("name", flat=True) + ) fields = ["org", "Last Name", "First Name", "Email", "Action"] + list(ca_roles) matrix.append(fields) all_user_data = {} @@ -197,7 +211,9 @@ def _get_doap_matrix(self, obj): user_data = {} if member.user.pk not in all_user_data: user_roles = list( - member.user.user_roles.filter(role__subsystem="CA").values_list("role__name", flat=True) + member.user.user_roles.filter(role__subsystem="CA").values_list( + "role__name", flat=True + ) ) user_data["org"] = member.user.partner.name user_data["Last Name"] = member.user.last_name @@ -210,7 +226,11 @@ def _get_doap_matrix(self, obj): # user_data["user_roles"] = user_roles all_user_data[member.user.pk] = user_data - values = {key: value for (key, value) in user_data.items() if key not in ["action"]} + values = { + key: value + for (key, value) in user_data.items() + if key not in ["action"] + } signature = str(hash(frozenset(values.items()))) user_data["signature"] = signature @@ -239,7 +259,9 @@ def force_sync_doap(self, request, pk): matrix = self._get_doap_matrix(obj) for row in matrix[1:]: User.objects.filter(email=row["Email"]).update(doap_hash=row["signature"]) - return HttpResponseRedirect(reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk])) + return HttpResponseRedirect( + reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk]) + ) @button(label="Send DOAP", group="doap") def send_doap(self, request, pk): @@ -252,11 +274,15 @@ def send_doap(self, request, pk): writer.writeheader() for row in matrix[1:]: writer.writerow(row) - recipients = [request.user.email] + config.CASHASSIST_DOAP_RECIPIENT.split(";") + recipients = [request.user.email] + config.CASHASSIST_DOAP_RECIPIENT.split( + ";" + ) self.log_change(request, obj, f'DOAP sent to {", ".join(recipients)}') buffer.seek(0) mail = EmailMessage( - f"DOAP updates for {obj.name}", f"Please find in attachment DOAP updates for {obj.name}", to=recipients + f"DOAP updates for {obj.name}", + f"Please find in attachment DOAP updates for {obj.name}", + to=recipients, ) mail.attach(f"doap_{obj.name}.csv", buffer.read(), "text/csv") mail.send() @@ -264,15 +290,21 @@ def send_doap(self, request, pk): if row["Action"] == "REMOVE": User.objects.filter(email=row["Email"]).update(doap_hash="") else: - User.objects.filter(email=row["Email"]).update(doap_hash=row["signature"]) + User.objects.filter(email=row["Email"]).update( + doap_hash=row["signature"] + ) obj.custom_fields.update({"hope": {"last_doap_sync": str(timezone.now())}}) obj.save() - self.message_user(request, f'Email sent to {", ".join(recipients)}', messages.SUCCESS) + self.message_user( + request, f'Email sent to {", ".join(recipients)}', messages.SUCCESS + ) except Exception as e: logger.exception(e) self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) - return HttpResponseRedirect(reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk])) + return HttpResponseRedirect( + reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk]) + ) @button(label="Export DOAP", group="doap", permission="can_export_doap") def export_doap(self, request, pk): @@ -303,7 +335,12 @@ def members(self, request, pk): context = self.get_common_context(request, pk, title="Members") context["members"] = ( context["original"] - 
.user_roles.values("user__id", "user__email", "user__username", "user__custom_fields__kobo_username") + .user_roles.values( + "user__id", + "user__email", + "user__username", + "user__custom_fields__kobo_username", + ) .annotate(roles=ArrayAgg("role__name")) .order_by("user__username") ) @@ -327,7 +364,9 @@ def _test_rapidpro_connection(self, request, pk): context["phone_number"] = phone_number context["flow_name"] = flow_name - error, response = api.test_connection_start_flow(flow_name, phone_number) + error, response = api.test_connection_start_flow( + flow_name, phone_number + ) if response: context["flow_uuid"] = response["flow"]["uuid"] context["flow_status"] = response["status"] @@ -338,7 +377,9 @@ def _test_rapidpro_connection(self, request, pk): else: messages.success(request, "Connection successful") except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {e}", messages.ERROR + ) context["form"] = form return TemplateResponse(request, "core/test_rapidpro.html", context) @@ -358,7 +399,9 @@ def mark_submissions(self, request, pk): except Exception as e: logger.exception(e) self.message_user(request, str(e), messages.ERROR) - return HttpResponseRedirect(reverse("admin:core_businessarea_change", args=[business_area.id])) + return HttpResponseRedirect( + reverse("admin:core_businessarea_change", args=[business_area.id]) + ) else: return _confirm_action( self, @@ -379,7 +422,9 @@ class CountryFilter(SimpleListFilter): parameter_name = "country" def lookups(self, request, model_admin): - return AdminArea.objects.filter(admin_area_level__admin_level=0).values_list("id", "title") + return AdminArea.objects.filter(admin_area_level__admin_level=0).values_list( + "id", "title" + ) def value(self): return self.used_parameters.get(self.parameter_name) @@ -443,37 +488,57 @@ def load_from_datamart(self, request): logger.exception(e) if admin_areas_country_name: for admin_area, country_name in admin_areas_country_name: - AdminAreaLevel.objects.filter(country_name=country_name).update(country=admin_area) + AdminAreaLevel.objects.filter(country_name=country_name).update( + country=admin_area + ) class LoadAdminAreaForm(forms.Form): # country = forms.ChoiceField(choices=AdminAreaLevel.objects.get_countries()) - country = forms.ModelChoiceField(queryset=AdminAreaLevel.objects.filter(admin_level=0).order_by("country_name")) + country = forms.ModelChoiceField( + queryset=AdminAreaLevel.objects.filter(admin_level=0).order_by("country_name") + ) geometries = forms.BooleanField(required=False) run_in_background = forms.BooleanField(required=False) page_size = forms.IntegerField(required=True, validators=[lambda x: x >= 1]) - max_records = forms.IntegerField(required=False, help_text="Leave blank for all records") + max_records = forms.IntegerField( + required=False, help_text="Leave blank for all records" + ) - skip_rebuild = forms.BooleanField(required=False, help_text="Do not rebuild MPTT tree") + skip_rebuild = forms.BooleanField( + required=False, help_text="Do not rebuild MPTT tree" + ) class ExportLocationsForm(forms.Form): country = forms.ModelChoiceField( - queryset=AdminArea.objects.filter(admin_area_level__admin_level=0).order_by("title") + queryset=AdminArea.objects.filter(admin_area_level__admin_level=0).order_by( + "title" + ) ) class ImportAreaForm(forms.Form): # country = forms.ChoiceField(choices=AdminAreaLevel.objects.get_countries()) - country = 
forms.ModelChoiceField(queryset=AdminArea.objects.filter(admin_area_level__admin_level=0)) + country = forms.ModelChoiceField( + queryset=AdminArea.objects.filter(admin_area_level__admin_level=0) + ) file = forms.FileField() @admin.register(AdminArea) class AdminAreaAdmin(ExtraUrlMixin, MPTTModelAdmin): search_fields = ("p_code", "title") - list_display = ("title", "country", "parent", "tree_id", "external_id", "admin_area_level", "p_code") + list_display = ( + "title", + "country", + "parent", + "tree_id", + "external_id", + "admin_area_level", + "p_code", + ) list_filter = ( AdminLevelFilter, CountryFilter, @@ -486,7 +551,9 @@ def rebuild_tree(self, request): try: AdminArea.objects.rebuild() except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) @button(permission="core.import_from_csv") def import_file(self, request): @@ -501,14 +568,26 @@ def import_file(self, request): csv_file = form.cleaned_data["file"] # If file is too large if csv_file.multiple_chunks(): - raise Exception("Uploaded file is too big (%.2f MB)" % (csv_file.size(1000 * 1000))) + raise Exception( + "Uploaded file is too big (%.2f MB)" + % (csv_file.size(1000 * 1000)) + ) data_set = csv_file.read().decode("utf-8-sig").splitlines() - reader = csv.DictReader(data_set, quoting=csv.QUOTE_NONE, delimiter=";") + reader = csv.DictReader( + data_set, quoting=csv.QUOTE_NONE, delimiter=";" + ) provided = set(reader.fieldnames) - minimum_set = {"area_code", "area_level", "parent_area_code", "area_name"} + minimum_set = { + "area_code", + "area_level", + "parent_area_code", + "area_name", + } if not minimum_set.issubset(provided): - raise Exception(f"Invalid columns {reader.fieldnames}. {provided.difference(minimum_set)}") + raise Exception( + f"Invalid columns {reader.fieldnames}. 
{provided.difference(minimum_set)}" + ) lines = [] infos = {"skipped": 0} # country = form.cleaned_data['country'] @@ -522,13 +601,21 @@ def import_file(self, request): level, __ = AdminAreaLevel.objects.get_or_create( country=country.admin_area_level, admin_level=level_number, - defaults={"name": row.get("level_name", f"{country.title} {level_number}")}, + defaults={ + "name": row.get( + "level_name", + f"{country.title} {level_number}", + ) + }, ) parent = AdminArea.objects.filter( - tree_id=country.tree_id, p_code=row["parent_area_code"] + tree_id=country.tree_id, + p_code=row["parent_area_code"], ).first() if parent is None: - assert level_number == 0, f"Cannot find parent area for {row}" + assert ( + level_number == 0 + ), f"Cannot find parent area for {row}" AdminArea.objects.create( external_id=external_id, title=row["area_name"], @@ -556,7 +643,9 @@ def import_file(self, request): except Exception as e: logger.exception(e) context["form"] = form - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) else: context["form"] = form @@ -588,13 +677,19 @@ def load_from_datamart(self, request): context["run_in_background"] = True else: results = load_admin_area( - country.id, geom, page_size, max_records, rebuild_mptt=not form.cleaned_data["skip_rebuild"] + country.id, + geom, + page_size, + max_records, + rebuild_mptt=not form.cleaned_data["skip_rebuild"], ) context["admin_areas"] = results except Exception as e: logger.exception(e) context["form"] = form - self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {e}", messages.ERROR + ) else: context["form"] = form @@ -616,7 +711,9 @@ def export_locations(self, request): except Exception as e: logger.exception(e) context["form"] = form - self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {e}", messages.ERROR + ) else: context["form"] = form return TemplateResponse(request, "core/admin/export_locations.html", context) @@ -673,7 +770,13 @@ class FlexibleAttributeChoiceAdmin(SoftDeletableAdminMixin): @admin.register(XLSXKoboTemplate) class XLSXKoboTemplateAdmin(SoftDeletableAdminMixin, ExtraUrlMixin, admin.ModelAdmin): - list_display = ("original_file_name", "uploaded_by", "created_at", "file", "import_status") + list_display = ( + "original_file_name", + "uploaded_by", + "created_at", + "file", + "import_status", + ) list_filter = ( "status", ("uploaded_by", AutoCompleteFilter), @@ -681,7 +784,13 @@ class XLSXKoboTemplateAdmin(SoftDeletableAdminMixin, ExtraUrlMixin, admin.ModelA search_fields = ("file_name",) date_hierarchy = "created_at" exclude = ("is_removed", "file_name", "status", "template_id") - readonly_fields = ("original_file_name", "uploaded_by", "file", "import_status", "error_description") + readonly_fields = ( + "original_file_name", + "uploaded_by", + "file", + "import_status", + "error_description", + ) def import_status(self, obj): if obj.status == self.model.SUCCESSFUL: @@ -716,7 +825,10 @@ def download_last_valid_file(self, request): level=ERROR, ) - @button(label="Rerun KOBO Import", visible=lambda o: o is not None and o.status != XLSXKoboTemplate.SUCCESSFUL) + @button( + label="Rerun KOBO Import", + visible=lambda o: o is not None and o.status != XLSXKoboTemplate.SUCCESSFUL, + ) def rerun_kobo_import(self, request, pk): xlsx_kobo_template_object = 
get_object_or_404(XLSXKoboTemplate, pk=pk) upload_new_kobo_template_and_update_flex_fields_task.run( @@ -752,9 +864,14 @@ def add_view(self, request, form_url="", extra_context=None): "survey_sheet": wb.sheet_by_name("survey"), "choices_sheet": wb.sheet_by_name("choices"), } - validation_errors = KoboTemplateValidator.validate_kobo_template(**sheets) + validation_errors = KoboTemplateValidator.validate_kobo_template( + **sheets + ) if validation_errors: - errors = [f"Field: {error['field']} - {error['message']}" for error in validation_errors] + errors = [ + f"Field: {error['field']} - {error['message']}" + for error in validation_errors + ] form.add_error(field=None, error=errors) except ValidationError as validation_error: logger.exception(validation_error) @@ -785,10 +902,14 @@ def add_view(self, request, form_url="", extra_context=None): return TemplateResponse(request, "core/xls_form.html", payload) def change_view(self, request, object_id=None, form_url="", extra_context=None): - extra_context = dict(show_save=False, show_save_and_continue=False, show_delete=True) + extra_context = dict( + show_save=False, show_save_and_continue=False, show_delete=True + ) has_add_permission = self.has_add_permission self.has_add_permission = lambda __: False - template_response = super().change_view(request, object_id, form_url, extra_context) + template_response = super().change_view( + request, object_id, form_url, extra_context + ) self.has_add_permission = has_add_permission return template_response diff --git a/backend/hct_mis_api/apps/core/base_test_case.py b/backend/hct_mis_api/apps/core/base_test_case.py index 0177e473d2..fc9ddd84ec 100644 --- a/backend/hct_mis_api/apps/core/base_test_case.py +++ b/backend/hct_mis_api/apps/core/base_test_case.py @@ -2,6 +2,7 @@ from django.contrib.auth.models import AnonymousUser from django.test import RequestFactory, TestCase + from django_countries.data import COUNTRIES from elasticsearch_dsl import connections from graphene.test import Client @@ -49,17 +50,21 @@ def generate_context(self, user=None, files=None): return context_value def generate_document_types_for_all_countries(self): - identification_type_choice = tuple((doc_type, label) for doc_type, label in IDENTIFICATION_TYPE_CHOICE) + identification_type_choice = tuple( + (doc_type, label) for doc_type, label in IDENTIFICATION_TYPE_CHOICE + ) document_types = [] for alpha2 in COUNTRIES: for doc_type, label in identification_type_choice: - document_types.append(DocumentType(country=alpha2, label=label, type=doc_type)) + document_types.append( + DocumentType(country=alpha2, label=label, type=doc_type) + ) DocumentType.objects.bulk_create(document_types, ignore_conflicts=True) @staticmethod def id_to_base64(object_id, name): - return base64.b64encode(f"{name}:{str(object_id)}".encode("utf-8")).decode() + return base64.b64encode(f"{name}:{str(object_id)}".encode()).decode() @staticmethod def __set_context_files(context, files): @@ -73,7 +78,9 @@ def create_user_role_with_permissions(user, permissions, business_area): role, created = Role.objects.update_or_create( name="Role with Permissions", defaults={"permissions": permission_list} ) - user_role, _ = UserRole.objects.get_or_create(user=user, role=role, business_area=business_area) + user_role, _ = UserRole.objects.get_or_create( + user=user, role=role, business_area=business_area + ) return user_role diff --git a/backend/hct_mis_api/apps/core/currencies.py b/backend/hct_mis_api/apps/core/currencies.py index f8bb5c35aa..13b90abfad 100644 --- 
a/backend/hct_mis_api/apps/core/currencies.py +++ b/backend/hct_mis_api/apps/core/currencies.py @@ -1,5 +1,4 @@ -from django.utils.translation import ugettext_lazy as _ - +from django.utils.translation import gettext_lazy as _ AED = "AED" AFN = "AFN" diff --git a/backend/hct_mis_api/apps/core/datamart/api.py b/backend/hct_mis_api/apps/core/datamart/api.py index d8a32b3a69..d9d74da0fc 100644 --- a/backend/hct_mis_api/apps/core/datamart/api.py +++ b/backend/hct_mis_api/apps/core/datamart/api.py @@ -14,7 +14,9 @@ class DatamartAPI: PAGE_SIZE = 100 - LOCATIONS_ENDPOINT = "/api/latest/datamart/locations/?-serializer=geo&format=json&ordering=id" + LOCATIONS_ENDPOINT = ( + "/api/latest/datamart/locations/?-serializer=geo&format=json&ordering=id" + ) def __init__(self): self._client = requests.session() @@ -33,8 +35,7 @@ def get_admin_levels(self, max_pages=None): while next_url: data = self._handle_get_request(next_url, is_absolute_url=True) next_url = data["next"] - for entry in data["results"]: - yield entry + yield from data["results"] if max_pages and page >= max_pages: break page += 1 @@ -44,7 +45,9 @@ def get_location(self, id): url = f"/api/latest/datamart/locations/{id}/" return self._handle_get_request(url) - def get_locations(self, *, country=None, gis=False, max_records=None, page_size=None): + def get_locations( + self, *, country=None, gis=False, max_records=None, page_size=None + ): url = f"/api/latest/datamart/locations/?&ordering=id,page_size={page_size or self.PAGE_SIZE}" if country: url = f"{url}&country_name={country}" @@ -78,7 +81,9 @@ def _features_to_multi_polygon(self, geometry): if geometry_type != "MultiPolygon": logger.error("Geometry type should be MultiPolygon") raise ValidationError("Geometry type should be MultiPolygon") - return MultiPolygon([Polygon(polygon) for polygon in geometry.get("coordinates")[0]]) + return MultiPolygon( + [Polygon(polygon) for polygon in geometry.get("coordinates")[0]] + ) def generate_admin_areas(self, locations, business_area): self.generate_admin_areas_old_models(locations, business_area) @@ -99,7 +104,9 @@ def generate_admin_areas_old_models(self, locations, business_area): ).first() if admin_area_level is None: admin_area_level = AdminAreaLevel( - admin_level=gateway, business_area=business_area, name=f"{business_area.name}-{gateway}" + admin_level=gateway, + business_area=business_area, + name=f"{business_area.name}-{gateway}", ) admin_area_level_dict[gateway] = admin_area_level @@ -110,7 +117,9 @@ def generate_admin_areas_old_models(self, locations, business_area): admin_area.title = properties.get("name") admin_area.admin_area_level = admin_area_level admin_area.p_code = properties.get("p_code") - admin_area.point = Point(properties.get("longitude"), properties.get("latitude")) + admin_area.point = Point( + properties.get("longitude"), properties.get("latitude") + ) admin_area.geom = self._features_to_multi_polygon(location.get("geometry")) admin_areas_to_create.append(admin_area) admin_areas_external_id_dict[external_id] = admin_area @@ -142,19 +151,29 @@ def generate_admin_areas_new_models(self, locations, business_area): external_id = location.get("id") admin_area_level = admin_area_level_dict.get(gateway) if admin_area_level is None: - admin_area_level = AreaType.objects.filter(area_level=gateway, country__name=business_area.name).first() + admin_area_level = AreaType.objects.filter( + area_level=gateway, country__name=business_area.name + ).first() if admin_area_level is None: country = 
Country.objects.get(name=business_area.name) - admin_area_level = AreaType(area_level=gateway, country=country, name=f"{business_area.name}-{gateway}") + admin_area_level = AreaType( + area_level=gateway, + country=country, + name=f"{business_area.name}-{gateway}", + ) admin_area_level_dict[gateway] = admin_area_level - admin_area = Area.objects.filter(area_type=admin_area_level, name=properties.get("name")).first() + admin_area = Area.objects.filter( + area_type=admin_area_level, name=properties.get("name") + ).first() if admin_area is None: admin_area = Area() admin_area.name = properties.get("name") admin_area.area_type = admin_area_level admin_area.p_code = properties.get("p_code") - admin_area.point = Point(properties.get("longitude"), properties.get("latitude")) + admin_area.point = Point( + properties.get("longitude"), properties.get("latitude") + ) admin_area.geom = self._features_to_multi_polygon(location.get("geometry")) admin_areas_to_create.append(admin_area) admin_areas_external_id_dict[external_id] = admin_area diff --git a/backend/hct_mis_api/apps/core/es_analyzers.py b/backend/hct_mis_api/apps/core/es_analyzers.py index c6f2b6e654..023900059e 100644 --- a/backend/hct_mis_api/apps/core/es_analyzers.py +++ b/backend/hct_mis_api/apps/core/es_analyzers.py @@ -1,10 +1,15 @@ import os from django.conf import settings -from elasticsearch_dsl import token_filter, analyzer, tokenizer + +from elasticsearch_dsl import analyzer, token_filter, tokenizer phonetic_filter = token_filter( - "my_metaphone", type="phonetic", encoder="double_metaphone", replace=False, langauge_set="common" + "my_metaphone", + type="phonetic", + encoder="double_metaphone", + replace=False, + langauge_set="common", ) phonetic_analyzer = analyzer( @@ -13,7 +18,7 @@ filter=["lowercase", phonetic_filter], ) -with open(os.path.join(settings.PROJECT_ROOT, "../data/synonyms.txt"), "r") as synonyms_file: +with open(os.path.join(settings.PROJECT_ROOT, "../data/synonyms.txt")) as synonyms_file: synonyms = synonyms_file.readlines() name_synonym_analyzer_token_filter = token_filter( diff --git a/backend/hct_mis_api/apps/core/exchange_rates/models.py b/backend/hct_mis_api/apps/core/exchange_rates/models.py index 6f8e592ed1..be74f4f519 100644 --- a/backend/hct_mis_api/apps/core/exchange_rates/models.py +++ b/backend/hct_mis_api/apps/core/exchange_rates/models.py @@ -1,12 +1,15 @@ from datetime import datetime +from typing import Dict, Optional + from dateutil.parser import parse -from typing import Optional, Dict from hct_mis_api.apps.core.exchange_rates.api import ExchangeRateAPI class HistoryExchangeRate: - def __init__(self, VALID_FROM: str, VALID_TO: str, PAST_XRATE: str, PAST_RATIO: str): + def __init__( + self, VALID_FROM: str, VALID_TO: str, PAST_XRATE: str, PAST_RATIO: str + ): self.valid_from = parse(VALID_FROM) self.valid_to = parse(VALID_TO) self.past_xrate = float(PAST_XRATE) @@ -38,7 +41,9 @@ def __init__( self.currency_name = CURRENCY_NAME self.x_rate = float(X_RATE) self.valid_from = parse(VALID_FROM) - self.valid_to = datetime(9999, 12, 31) if VALID_TO == "31-DEC-99" else parse(VALID_TO) + self.valid_to = ( + datetime(9999, 12, 31) if VALID_TO == "31-DEC-99" else parse(VALID_TO) + ) self.ratio = float(RATIO) self.no_of_decimal = int(NO_OF_DECIMAL) @@ -48,12 +53,16 @@ def __init__( else: past_xrates.reverse() - self.historical_exchange_rates = [HistoryExchangeRate(**past_xrate) for past_xrate in past_xrates] + self.historical_exchange_rates = [ + HistoryExchangeRate(**past_xrate) for past_xrate in past_xrates 
+ ] def __repr__(self): return f"SingleExchangeRate(currency_code: {self.currency_code}, ratio: {self.ratio}, x_rate: {self.x_rate})" - def get_exchange_rate_by_dispersion_date(self, dispersion_date: datetime) -> Optional[float]: + def get_exchange_rate_by_dispersion_date( + self, dispersion_date: datetime + ) -> Optional[float]: today = datetime.now() dispersion_date_is_not_provided = dispersion_date is None @@ -61,14 +70,21 @@ def get_exchange_rate_by_dispersion_date(self, dispersion_date: datetime) -> Opt return self.x_rate * self.ratio dispersion_date_is_in_current_date_range = ( - self.valid_from <= dispersion_date <= (today if self.valid_to is None else self.valid_to) + self.valid_from + <= dispersion_date + <= (today if self.valid_to is None else self.valid_to) ) if dispersion_date_is_in_current_date_range: return self.x_rate * self.ratio for historical_exchange_rate in self.historical_exchange_rates: - if historical_exchange_rate.is_valid_for_provided_dispersion_date(dispersion_date): - return historical_exchange_rate.past_xrate * historical_exchange_rate.past_ratio + if historical_exchange_rate.is_valid_for_provided_dispersion_date( + dispersion_date + ): + return ( + historical_exchange_rate.past_xrate + * historical_exchange_rate.past_ratio + ) return None @@ -85,7 +101,9 @@ def __init__(self, with_history: bool = True, api_client: ExchangeRateAPI = None ) @staticmethod - def _convert_response_json_to_exchange_rates(response_json: dict) -> Dict[str, SingleExchangeRate]: + def _convert_response_json_to_exchange_rates( + response_json: dict, + ) -> dict[str, SingleExchangeRate]: raw_exchange_rates = response_json.get("ROWSET", {}).get("ROW", []) return { diff --git a/backend/hct_mis_api/apps/core/export_locations.py b/backend/hct_mis_api/apps/core/export_locations.py index 0edcb387a3..c6efbc6c1d 100644 --- a/backend/hct_mis_api/apps/core/export_locations.py +++ b/backend/hct_mis_api/apps/core/export_locations.py @@ -22,7 +22,7 @@ class ExportLocations: def __init__(self, country): self._country = country self._file_name = "locations.csv" - self._matrix: List[Union[List[str], Dict[str, str]]] = [self.fields] + self._matrix: list[Union[list[str], dict[str, str]]] = [self.fields] def export_to_file(self): admin_areas = self._load_admin_areas() diff --git a/backend/hct_mis_api/apps/core/flex_fields_importer.py b/backend/hct_mis_api/apps/core/flex_fields_importer.py index 92a989caca..638eaf0ec7 100644 --- a/backend/hct_mis_api/apps/core/flex_fields_importer.py +++ b/backend/hct_mis_api/apps/core/flex_fields_importer.py @@ -2,21 +2,26 @@ from collections import defaultdict from os.path import isfile -import xlrd from django.core.exceptions import ValidationError from django.db import transaction from django.utils.html import strip_tags +import xlrd + from hct_mis_api.apps.core.core_fields_attributes import ( - TYPE_STRING, - TYPE_INTEGER, - TYPE_DECIMAL, TYPE_DATE, + TYPE_DECIMAL, TYPE_IMAGE, - TYPE_SELECT_ONE, + TYPE_INTEGER, TYPE_SELECT_MANY, + TYPE_SELECT_ONE, + TYPE_STRING, +) +from hct_mis_api.apps.core.models import ( + FlexibleAttribute, + FlexibleAttributeChoice, + FlexibleAttributeGroup, ) -from hct_mis_api.apps.core.models import FlexibleAttribute, FlexibleAttributeGroup, FlexibleAttributeChoice logger = logging.getLogger(__name__) @@ -40,11 +45,17 @@ class FlexibleAttributeImporter: } # Constants for xls import - ATTRIBUTE_MODEL_FIELDS = [field.name for field in FlexibleAttribute._meta.get_fields()] + ATTRIBUTE_MODEL_FIELDS = [ + field.name for field in 
FlexibleAttribute._meta.get_fields() + ] - GROUP_MODEL_FIELDS = [field.name for field in FlexibleAttributeGroup._meta.get_fields()] + GROUP_MODEL_FIELDS = [ + field.name for field in FlexibleAttributeGroup._meta.get_fields() + ] - CHOICE_MODEL_FIELDS = [field.name for field in FlexibleAttributeChoice._meta.get_fields()] + CHOICE_MODEL_FIELDS = [ + field.name for field in FlexibleAttributeChoice._meta.get_fields() + ] CORE_FIELD_SUFFIXES = ( "_h_c", @@ -74,7 +85,9 @@ def _get_model_fields(self, object_type_to_add): "choice": self.CHOICE_MODEL_FIELDS, }.get(object_type_to_add) - def _assign_field_values(self, value, header_name, object_type_to_add, row, row_number): + def _assign_field_values( + self, value, header_name, object_type_to_add, row, row_number + ): model_fields = self._get_model_fields(object_type_to_add) if any(header_name.startswith(i) for i in self.JSON_MODEL_FIELDS): @@ -96,16 +109,27 @@ def _assign_field_values(self, value, header_name, object_type_to_add, row, row_ field_suffix = row[1].value[-4:] is_empty_and_not_index_field = not value and not is_index_field is_core_or_flex_field = ( - field_suffix in self.CORE_FIELD_SUFFIXES or field_suffix in self.FLEX_FIELD_SUFFIXES + field_suffix in self.CORE_FIELD_SUFFIXES + or field_suffix in self.FLEX_FIELD_SUFFIXES ) if is_empty_and_not_index_field and is_core_or_flex_field: - logger.error(f"Survey Sheet: Row {row_number + 1}: English label cannot be empty") - raise ValidationError(f"Survey Sheet: Row {row_number + 1}: English label cannot be empty") + logger.error( + f"Survey Sheet: Row {row_number + 1}: English label cannot be empty" + ) + raise ValidationError( + f"Survey Sheet: Row {row_number + 1}: English label cannot be empty" + ) if object_type_to_add == "choice" and not value: - logger.error(f"Choices Sheet: Row {row_number + 1}: English label cannot be empty") - raise ValidationError(f"Choices Sheet: Row {row_number + 1}: English label cannot be empty") + logger.error( + f"Choices Sheet: Row {row_number + 1}: English label cannot be empty" + ) + raise ValidationError( + f"Choices Sheet: Row {row_number + 1}: English label cannot be empty" + ) - self.json_fields_to_create[label].update({language: cleared_value if value else ""}) + self.json_fields_to_create[label].update( + {language: cleared_value if value else ""} + ) return if header_name == "required": @@ -118,13 +142,19 @@ def _assign_field_values(self, value, header_name, object_type_to_add, row, row_ if header_name in model_fields: if header_name == "type": if not value: - logger.error(f"Survey Sheet: Row {row_number + 1}: Type is required") - raise ValidationError(f"Survey Sheet: Row {row_number + 1}: Type is required") + logger.error( + f"Survey Sheet: Row {row_number + 1}: Type is required" + ) + raise ValidationError( + f"Survey Sheet: Row {row_number + 1}: Type is required" + ) choice_key = value.split(" ")[0] if choice_key == "calculate": self.object_fields_to_create["type"] = "calculate" elif choice_key in self.TYPE_CHOICE_MAP.keys(): - self.object_fields_to_create["type"] = self.TYPE_CHOICE_MAP.get(choice_key) + self.object_fields_to_create["type"] = self.TYPE_CHOICE_MAP.get( + choice_key + ) else: is_attribute_name_empty = header_name == "name" and value in (None, "") is_choice_list_name_empty = ( @@ -132,18 +162,29 @@ def _assign_field_values(self, value, header_name, object_type_to_add, row, row_ ) and not value if is_attribute_name_empty: - logger.error(f"Survey Sheet: Row {row_number + 1}: Name is required") - raise ValidationError(f"Survey Sheet: Row 
{row_number + 1}: Name is required") + logger.error( + f"Survey Sheet: Row {row_number + 1}: Name is required" + ) + raise ValidationError( + f"Survey Sheet: Row {row_number + 1}: Name is required" + ) if is_choice_list_name_empty: - logger.error(f"Survey Sheet: Row {row_number + 1}: List Name is required") - raise ValidationError(f"Survey Sheet: Row {row_number + 1}: List Name is required") + logger.error( + f"Survey Sheet: Row {row_number + 1}: List Name is required" + ) + raise ValidationError( + f"Survey Sheet: Row {row_number + 1}: List Name is required" + ) self.object_fields_to_create[header_name] = value if value else "" is_valid_calculate_field_and_header_is_calculate_field_type = ( object_type_to_add == "attribute" and header_name == "calculated_result_field_type" and row[0].value == "calculate" - and any(self.object_fields_to_create["name"].endswith(i) for i in self.FLEX_FIELD_SUFFIXES) + and any( + self.object_fields_to_create["name"].endswith(i) + for i in self.FLEX_FIELD_SUFFIXES + ) ) if is_valid_calculate_field_and_header_is_calculate_field_type: choice_key = value.strip() if value and isinstance(value, str) else None @@ -163,12 +204,14 @@ def _assign_field_values(self, value, header_name, object_type_to_add, row, row_ logger.error(validation_error_message) raise ValidationError(validation_error_message) else: - self.object_fields_to_create["type"] = self.CALCULATE_TYPE_CHOICE_MAP[choice_key] + self.object_fields_to_create["type"] = self.CALCULATE_TYPE_CHOICE_MAP[ + choice_key + ] def _can_add_row(self, row): - is_core_field = any(row[1].value.endswith(i) for i in self.CORE_FIELD_SUFFIXES) and not row[0].value.endswith( - "_group" - ) + is_core_field = any( + row[1].value.endswith(i) for i in self.CORE_FIELD_SUFFIXES + ) and not row[0].value.endswith("_group") is_in_excluded = row[0].value in self.EXCLUDED_MODEL_FIELDS @@ -195,7 +238,7 @@ def _get_list_of_field_choices(self, sheet): if row[0].value.startswith("select_"): fields_with_choices.append(row) - return set(row[0].value.split(" ")[1] for row in fields_with_choices) + return {row[0].value.split(" ")[1] for row in fields_with_choices} def _get_field_choice_name(self, row): has_choice = row[0].value.startswith("select_") @@ -257,7 +300,9 @@ def _handle_choices(self, sheets): to_create_choices, ) - choices_to_delete = set(choices_from_db).difference(set(created_choices + updated_choices)) + choices_to_delete = set(choices_from_db).difference( + set(created_choices + updated_choices) + ) for choice in choices_to_delete: choice.delete() @@ -280,7 +325,11 @@ def _handle_groups_and_fields(self, sheet): if all([cell.ctype == xlrd.XL_CELL_EMPTY for cell in row]): continue - object_type_to_add = "group" if row[0].value in ("begin_group", "begin_repeat") else "attribute" + object_type_to_add = ( + "group" + if row[0].value in ("begin_group", "begin_repeat") + else "attribute" + ) repeatable = True if row[0].value == "begin_repeat" else False self._reset_model_fields_variables() @@ -298,7 +347,10 @@ def _handle_groups_and_fields(self, sheet): row_number, ) - is_flex_field = any(self.object_fields_to_create["name"].endswith(i) for i in self.FLEX_FIELD_SUFFIXES) + is_flex_field = any( + self.object_fields_to_create["name"].endswith(i) + for i in self.FLEX_FIELD_SUFFIXES + ) if object_type_to_add == "group": obj = FlexibleAttributeGroup.all_objects.filter( @@ -344,9 +396,13 @@ def _handle_groups_and_fields(self, sheet): parent = None if obj: - if obj.type != self.object_fields_to_create["type"] and not obj.is_removed: + if ( + 
obj.type != self.object_fields_to_create["type"] + and not obj.is_removed + ): logger.error( - f"Survey Sheet: Row {row_number + 1}: Type of the " f"attribute cannot be changed!" + f"Survey Sheet: Row {row_number + 1}: Type of the " + f"attribute cannot be changed!" ) raise ValidationError( f"Survey Sheet: Row {row_number + 1}: Type of the attribute cannot be changed!" diff --git a/backend/hct_mis_api/apps/core/kobo/common.py b/backend/hct_mis_api/apps/core/kobo/common.py index 9997d370fa..5882f118c3 100644 --- a/backend/hct_mis_api/apps/core/kobo/common.py +++ b/backend/hct_mis_api/apps/core/kobo/common.py @@ -62,11 +62,15 @@ def get_field_name(field_name: str) -> str: def reduce_assets_list(assets: list, deployed: bool = True, *args, **kwarg) -> list: if deployed: - return [reduce_asset(asset) for asset in assets if asset["has_deployment"] and asset["deployment__active"]] + return [ + reduce_asset(asset) + for asset in assets + if asset["has_deployment"] and asset["deployment__active"] + ] return [reduce_asset(asset) for asset in assets] -def count_population(results: list, business_area: BusinessArea) -> Tuple[int, int]: +def count_population(results: list, business_area: BusinessArea) -> tuple[int, int]: from hashlib import sha256 from hct_mis_api.apps.core.utils import rename_dict_keys @@ -84,7 +88,9 @@ def count_population(results: list, business_area: BusinessArea) -> Tuple[int, i if business_area.get_sys_option("ignore_amended_kobo_submissions"): submission_meta_data["amended"] = False - submission_exists = KoboImportedSubmission.objects.filter(**submission_meta_data).exists() + submission_exists = KoboImportedSubmission.objects.filter( + **submission_meta_data + ).exists() if submission_exists is False: total_households_count += 1 for individual_data in result[KOBO_FORM_INDIVIDUALS_COLUMN_NAME]: @@ -106,7 +112,10 @@ def count_population(results: list, business_area: BusinessArea) -> Tuple[int, i seen_hash_keys.append(hash_key) total_individuals_count += 1 if ( - reduced_submission.get("relationship_i_c", RELATIONSHIP_UNKNOWN).upper() == NON_BENEFICIARY + reduced_submission.get( + "relationship_i_c", RELATIONSHIP_UNKNOWN + ).upper() + == NON_BENEFICIARY and seen_hash_keys.count(hash_key) > 1 ): total_individuals_count -= 1 @@ -117,6 +126,8 @@ def count_population(results: list, business_area: BusinessArea) -> Tuple[int, i def filter_by_owner(data, business_area): kobo_username = business_area.kobo_username if isinstance(data, list): - return [element for element in data if element["owner__username"] == kobo_username] + return [ + element for element in data if element["owner__username"] == kobo_username + ] if data["owner__username"] == kobo_username: return data diff --git a/backend/hct_mis_api/apps/core/mis_test_runner.py b/backend/hct_mis_api/apps/core/mis_test_runner.py index b490997d8c..32527210f4 100644 --- a/backend/hct_mis_api/apps/core/mis_test_runner.py +++ b/backend/hct_mis_api/apps/core/mis_test_runner.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import os from django.conf import settings @@ -9,7 +7,9 @@ from snapshottest.django import TestRunner -def create_test_db_and_schemas(creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False): +def create_test_db_and_schemas( + creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False +): """ Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created. 
@@ -61,7 +61,9 @@ def create_test_db_and_schemas(creation, verbosity=1, autoclobber=False, seriali # who are testing on databases without transactions or who are using # a TransactionTestCase still get a clean database on every test run. if serialize: - creation.connection._test_serialized_contents = creation.serialize_db_to_string() + creation.connection._test_serialized_contents = ( + creation.serialize_db_to_string() + ) call_command("createcachetable", database=creation.connection.alias) @@ -71,7 +73,9 @@ def create_test_db_and_schemas(creation, verbosity=1, autoclobber=False, seriali return test_database_name -def create_fake_test_db(creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False): +def create_fake_test_db( + creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False +): """ Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created. @@ -119,7 +123,15 @@ def create_fake_test_db(creation, verbosity=1, autoclobber=False, serialize=True return test_database_name -def _setup_schema_database(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, alias=None, **kwargs): +def _setup_schema_database( + verbosity, + interactive, + keepdb=False, + debug_sql=False, + parallel=0, + alias=None, + **kwargs +): """Create the test databases.""" connection = connections[alias] @@ -202,7 +214,9 @@ def setup_databases(self, **kwargs): read_only = connection.settings_dict.get("TEST", {}).get("READ_ONLY", False) if read_only: if self.verbosity >= 1: - connection.creation.log("Skipping ReadOnly test database for alias '%s'..." % alias) + connection.creation.log( + "Skipping ReadOnly test database for alias '%s'..." % alias + ) aliases = kwargs.get("aliases") aliases.discard(alias) continue @@ -218,7 +232,12 @@ def setup_databases(self, **kwargs): if not created: old_names.extend( _setup_schema_database( - self.verbosity, self.interactive, self.keepdb, self.debug_sql, self.parallel, alias=alias + self.verbosity, + self.interactive, + self.keepdb, + self.debug_sql, + self.parallel, + alias=alias, ) ) created = True @@ -228,7 +247,9 @@ def setup_databases(self, **kwargs): verbosity=self.verbosity, autoclobber=not self.interactive, keepdb=self.keepdb, - serialize=connection.settings_dict.get("TEST", {}).get("SERIALIZE", True), + serialize=connection.settings_dict.get("TEST", {}).get( + "SERIALIZE", True + ), ) old_names.extend(super().setup_databases(**kwargs)) diff --git a/backend/hct_mis_api/apps/core/models.py b/backend/hct_mis_api/apps/core/models.py index 06a96715fc..2c9bc87727 100644 --- a/backend/hct_mis_api/apps/core/models.py +++ b/backend/hct_mis_api/apps/core/models.py @@ -1,8 +1,8 @@ from django.conf import settings from django.contrib.gis.db import models -from django.contrib.postgres.fields import JSONField from django.core.validators import MaxValueValidator, MinValueValidator -from django.utils.translation import ugettext_lazy as _ +from django.db.models import JSONField +from django.utils.translation import gettext_lazy as _ from django_celery_beat.models import PeriodicTask from django_celery_beat.schedulers import DatabaseScheduler, ModelEntry @@ -50,11 +50,20 @@ class BusinessArea(TimeStampedUUIDModel): custom_fields = JSONField(default=dict, blank=True) has_data_sharing_agreement = models.BooleanField(default=False) - parent = models.ForeignKey("self", related_name="children", on_delete=models.SET_NULL, null=True, blank=True) + parent = models.ForeignKey( + "self", + 
related_name="children", + on_delete=models.SET_NULL, + null=True, + blank=True, + ) is_split = models.BooleanField(default=False) countries = models.ManyToManyField( - "AdminAreaLevel", blank=True, limit_choices_to={"admin_level": 0}, related_name="business_areas" + "AdminAreaLevel", + blank=True, + limit_choices_to={"admin_level": 0}, + related_name="business_areas", ) countries_new = models.ManyToManyField("geo.Country", related_name="business_areas") deduplication_duplicate_score = models.FloatField( @@ -69,16 +78,20 @@ class BusinessArea(TimeStampedUUIDModel): ) deduplication_batch_duplicates_percentage = models.IntegerField( - default=50, help_text="If percentage of duplicates is higher or equal to this setting, deduplication is aborted" + default=50, + help_text="If percentage of duplicates is higher or equal to this setting, deduplication is aborted", ) deduplication_batch_duplicates_allowed = models.IntegerField( - default=5, help_text="If amount of duplicates for single individual exceeds this limit deduplication is aborted" + default=5, + help_text="If amount of duplicates for single individual exceeds this limit deduplication is aborted", ) deduplication_golden_record_duplicates_percentage = models.IntegerField( - default=50, help_text="If percentage of duplicates is higher or equal to this setting, deduplication is aborted" + default=50, + help_text="If percentage of duplicates is higher or equal to this setting, deduplication is aborted", ) deduplication_golden_record_duplicates_allowed = models.IntegerField( - default=5, help_text="If amount of duplicates for single individual exceeds this limit deduplication is aborted" + default=5, + help_text="If amount of duplicates for single individual exceeds this limit deduplication is aborted", ) screen_beneficiary = models.BooleanField(default=False) @@ -89,7 +102,7 @@ def save(self, *args, **kwargs): self.parent.save() if self.children.count(): self.is_split = True - super(BusinessArea, self).save(*args, **kwargs) + super().save(*args, **kwargs) class Meta: ordering = ["name"] @@ -125,7 +138,11 @@ def get_sys_option(self, key, default=None): class AdminAreaLevelManager(models.Manager): def get_countries(self): - return self.filter(admin_level=0).order_by("country_name").values_list("id", "country_name") + return ( + self.filter(admin_level=0) + .order_by("country_name") + .values_list("id", "country_name") + ) class AdminAreaLevel(TimeStampedUUIDModel): @@ -134,15 +151,27 @@ class AdminAreaLevel(TimeStampedUUIDModel): """ name = models.CharField(max_length=64, verbose_name=_("Name")) - display_name = models.CharField(max_length=64, blank=True, null=True, verbose_name=_("Display Name")) - admin_level = models.PositiveSmallIntegerField(verbose_name=_("Admin Level"), blank=True, null=True) + display_name = models.CharField( + max_length=64, blank=True, null=True, verbose_name=_("Display Name") + ) + admin_level = models.PositiveSmallIntegerField( + verbose_name=_("Admin Level"), blank=True, null=True + ) business_area = models.ForeignKey( - "BusinessArea", on_delete=models.SET_NULL, related_name="admin_area_level", null=True, blank=True + "BusinessArea", + on_delete=models.SET_NULL, + related_name="admin_area_level", + null=True, + blank=True, ) area_code = models.CharField(max_length=8, blank=True, null=True) country_name = models.CharField(max_length=100, blank=True, null=True) country = models.ForeignKey( - "self", blank=True, null=True, limit_choices_to={"admin_level": 0}, on_delete=models.CASCADE + "self", + blank=True, + 
null=True, + limit_choices_to={"admin_level": 0}, + on_delete=models.CASCADE, ) datamart_id = models.CharField(max_length=8, blank=True, null=True, unique=True) objects = AdminAreaLevelManager() @@ -165,7 +194,9 @@ def __str__(self): class AdminAreaManager(TreeManager): def get_queryset(self): - return super(AdminAreaManager, self).get_queryset().order_by("title").select_related("admin_area_level") + return ( + super().get_queryset().order_by("title").select_related("admin_area_level") + ) class AdminArea(MPTTModel, TimeStampedUUIDModel): @@ -181,7 +212,10 @@ class AdminArea(MPTTModel, TimeStampedUUIDModel): """ external_id = models.CharField( - help_text="An ID representing this instance in datamart", blank=True, null=True, max_length=32 + help_text="An ID representing this instance in datamart", + blank=True, + null=True, + max_length=32, ) title = models.CharField(max_length=255) @@ -193,7 +227,9 @@ class AdminArea(MPTTModel, TimeStampedUUIDModel): on_delete=models.CASCADE, ) - p_code = models.CharField(max_length=32, blank=True, null=True, verbose_name="P Code") + p_code = models.CharField( + max_length=32, blank=True, null=True, verbose_name="P Code" + ) parent = TreeForeignKey( "self", @@ -232,7 +268,13 @@ def country(self): @property def geo_point(self): - return self.point if self.point else self.geom.point_on_surface if self.geom else "" + return ( + self.point + if self.point + else self.geom.point_on_surface + if self.geom + else "" + ) @property def point_lat_long(self): @@ -245,7 +287,10 @@ def get_admin_areas_as_choices(cls, admin_level, business_area=None): queryset.filter(admin_area_level__business_area=business_area) queryset = queryset.order_by("title") return [ - {"label": {"English(EN)": f"{admin_area.title}-{admin_area.p_code}"}, "value": admin_area.p_code} + { + "label": {"English(EN)": f"{admin_area.title}-{admin_area.p_code}"}, + "value": admin_area.p_code, + } for admin_area in queryset ] @@ -256,7 +301,10 @@ def get_admin_areas(cls, business_area=None): queryset.filter(admin_area_level__business_area=business_area) queryset = queryset.order_by("title") return [ - {"label": {"English(EN)": f"{admin_area.title}-{admin_area.p_code}"}, "value": admin_area.p_code} + { + "label": {"English(EN)": f"{admin_area.title}-{admin_area.p_code}"}, + "value": admin_area.p_code, + } for admin_area in queryset ] @@ -335,7 +383,9 @@ class Meta: list_name = models.CharField(max_length=255) name = models.CharField(max_length=255) label = JSONField(default=dict) - flex_attributes = models.ManyToManyField("core.FlexibleAttribute", related_name="choices") + flex_attributes = models.ManyToManyField( + "core.FlexibleAttribute", related_name="choices" + ) def __str__(self): return f"list name: {self.list_name}, name: {self.name}" @@ -414,17 +464,28 @@ def get_iso2_code(self, ca_code): return self._cache["ca2"].get(ca_code, ca_code) def build_cache(self): - if not self._cache[2] or not self._cache[3] or not self._cache["ca2"] or not self._cache["ca3"]: + if ( + not self._cache[2] + or not self._cache[3] + or not self._cache["ca2"] + or not self._cache["ca3"] + ): for entry in self.all(): self._cache[2][entry.country.code] = entry.ca_code - self._cache[3][entry.country.countries.alpha3(entry.country.code)] = entry.ca_code + self._cache[3][ + entry.country.countries.alpha3(entry.country.code) + ] = entry.ca_code self._cache["ca2"][entry.ca_code] = entry.country.code - self._cache["ca3"][entry.ca_code] = entry.country.countries.alpha3(entry.country.code) + self._cache["ca3"][entry.ca_code] = 
entry.country.countries.alpha3( + entry.country.code + ) class CountryCodeMap(models.Model): country = CountryField(unique=True) - country_new = models.ForeignKey("geo.Country", blank=True, null=True, unique=True, on_delete=models.PROTECT) + country_new = models.ForeignKey( + "geo.Country", blank=True, null=True, unique=True, on_delete=models.PROTECT + ) ca_code = models.CharField(max_length=5, unique=True) objects = CountryCodeMapManager() diff --git a/backend/hct_mis_api/apps/core/utils.py b/backend/hct_mis_api/apps/core/utils.py index 9503a15863..08452f7c2e 100644 --- a/backend/hct_mis_api/apps/core/utils.py +++ b/backend/hct_mis_api/apps/core/utils.py @@ -60,10 +60,12 @@ def encode_id_base64(id_string, model_name): from base64 import b64encode - return b64encode(f"{model_name}Node:{str(id_string)}".encode("utf-8")).decode() + return b64encode(f"{model_name}Node:{str(id_string)}".encode()).decode() -def unique_slugify(instance, value, slug_field_name="slug", queryset=None, slug_separator="-"): +def unique_slugify( + instance, value, slug_field_name="slug", queryset=None, slug_separator="-" +): """ Calculates and stores a unique slug of ``value`` for an instance. @@ -99,11 +101,11 @@ def unique_slugify(instance, value, slug_field_name="slug", queryset=None, slug_ next = 2 while not slug or queryset.filter(**{slug_field_name: slug}): slug = original_slug - end = "%s%s" % (slug_separator, next) + end = f"{slug_separator}{next}" if slug_len and len(slug) + len(end) > slug_len: slug = slug[: slug_len - len(end)] slug = _slug_strip(slug, slug_separator) - slug = "%s%s" % (slug, end) + slug = f"{slug}{end}" next += 1 setattr(instance, slug_field.attname, slug) @@ -132,7 +134,7 @@ def _slug_strip(value, separator="-"): if separator: if separator != "-": re_sep = re.escape(separator) - value = re.sub(r"^%s+|%s+$" % (re_sep, re_sep), "", value) + value = re.sub(fr"^{re_sep}+|{re_sep}+$", "", value) return value @@ -268,7 +270,9 @@ def nested_dict_get(dictionary, path): import functools return functools.reduce( - lambda d, key: d.get(key, None) if isinstance(d, dict) else None, path.split("."), dictionary + lambda d, key: d.get(key, None) if isinstance(d, dict) else None, + path.split("."), + dictionary, ) @@ -279,7 +283,7 @@ def get_count_and_percentage(input_list, all_items_list): return {"count": count, "percentage": percentage} -def encode_ids(results: List[dict], model_name: str, key: str) -> List[dict]: +def encode_ids(results: list[dict], model_name: str, key: str) -> list[dict]: if results: for result in results: result_id = result[key] @@ -299,7 +303,9 @@ def to_dict(instance, fields=None, dict_fields=None): for field in fields: main_field = getattr(instance, field, "__NOT_EXIST__") if main_field != "__NOT_EXIST__": - data[field] = main_field if issubclass(type(main_field), Model) else main_field + data[field] = ( + main_field if issubclass(type(main_field), Model) else main_field + ) if dict_fields and isinstance(dict_fields, dict): for main_field_key, nested_fields in dict_fields.items(): @@ -385,7 +391,9 @@ def normalize_fields(self, fields): return OrderedDict(fields) # convert iterable of values => iterable of pairs (field name, param name) - assert is_iterable(fields), "'fields' must be an iterable (e.g., a list, tuple, or mapping)." + assert is_iterable( + fields + ), "'fields' must be an iterable (e.g., a list, tuple, or mapping)." 
# fields is an iterable of field names assert all( @@ -405,7 +413,9 @@ def normalize_fields(self, fields): new_fields.append(field_name) self.lower_dict[field_name] = field - return OrderedDict([(f, f) if isinstance(f, (str, Lower)) else f for f in new_fields]) + return OrderedDict( + [(f, f) if isinstance(f, (str, Lower)) else f for f in new_fields] + ) def is_valid_uuid(uuid_str): @@ -453,7 +463,9 @@ def check_concurrency_version_in_mutation(version, target): from graphql import GraphQLError if version != target.version: - logger.error(f"Someone has modified this {target} record, versions {version} != {target.version}") + logger.error( + f"Someone has modified this {target} record, versions {version} != {target.version}" + ) raise GraphQLError("Someone has modified this record") @@ -481,14 +493,17 @@ def update_labels_mapping(csv_file): labels_mapping = { core_field_data["xlsx_field"]: { "old": core_field_data["label"], - "new": {"English(EN)": fields_mapping.get(core_field_data["xlsx_field"], "")}, + "new": { + "English(EN)": fields_mapping.get(core_field_data["xlsx_field"], "") + }, } for core_field_data in CORE_FIELDS_ATTRIBUTES - if core_field_data["label"].get("English(EN)", "") != fields_mapping.get(core_field_data["xlsx_field"], "") + if core_field_data["label"].get("English(EN)", "") + != fields_mapping.get(core_field_data["xlsx_field"], "") } file_path = f"{settings.PROJECT_ROOT}/apps/core/core_fields_attributes.py" - with open(file_path, "r") as f: + with open(file_path) as f: content = f.read() new_content = content for core_field, labels in labels_mapping.items(): @@ -538,7 +553,11 @@ def chart_map_choices(choices): def chart_get_filtered_qs( - obj, year, business_area_slug_filter: dict = None, additional_filters: dict = None, year_filter_path: str = None + obj, + year, + business_area_slug_filter: dict = None, + additional_filters: dict = None, + year_filter_path: str = None, ) -> QuerySet: if additional_filters is None: additional_filters = {} @@ -546,9 +565,14 @@ def chart_get_filtered_qs( year_filter = {"created_at__year": year} else: year_filter = {f"{year_filter_path}__year": year} - if business_area_slug_filter is None or "global" in business_area_slug_filter.values(): + if ( + business_area_slug_filter is None + or "global" in business_area_slug_filter.values() + ): business_area_slug_filter = {} - return obj.objects.filter(**year_filter, **business_area_slug_filter, **additional_filters) + return obj.objects.filter( + **year_filter, **business_area_slug_filter, **additional_filters + ) def parse_list_values_to_int(list_to_parse): @@ -577,7 +601,10 @@ def resolve_f(*args, **kwargs): if resolve_info.context.user.is_authenticated: business_area_slug = kwargs.get("business_area_slug", "global") business_area = BusinessArea.objects.filter(slug=business_area_slug).first() - if any(resolve_info.context.user.has_permission(per.name, business_area) for per in permissions): + if any( + resolve_info.context.user.has_permission(per.name, business_area) + for per in permissions + ): return chart_resolve(*args, **kwargs) logger.error("Permission Denied") raise GraphQLError("Permission Denied") @@ -586,10 +613,14 @@ def resolve_f(*args, **kwargs): def chart_filters_decoder(filters): - return {filter_name: decode_id_string(value) for filter_name, value in filters.items()} + return { + filter_name: decode_id_string(value) for filter_name, value in filters.items() + } -def chart_create_filter_query(filters, program_id_path="id", administrative_area_path="admin_areas"): +def 
chart_create_filter_query( + filters, program_id_path="id", administrative_area_path="admin_areas" +): filter_query = {} if filters.get("program") is not None: filter_query.update({program_id_path: filters.get("program")}) @@ -636,7 +667,9 @@ def resolve_flex_fields_choices_to_string(parent): if flex_field in (FlexibleAttribute.SELECT_ONE, FlexibleAttribute.SELECT_MANY): if isinstance(value, list): - new_value = [str(current_choice_value) for current_choice_value in value] + new_value = [ + str(current_choice_value) for current_choice_value in value + ] else: new_value = str(value) flex_fields_with_str_choices[flex_field_name] = new_value @@ -665,7 +698,10 @@ def __init__(self, sheet): col_holder = list( itertools.chain( string.ascii_uppercase, - ("".join(pair) for pair in itertools.product(string.ascii_uppercase, repeat=2)), + ( + "".join(pair) + for pair in itertools.product(string.ascii_uppercase, repeat=2) + ), ) ) """Loads all sheet images""" @@ -703,9 +739,15 @@ def fix_flex_type_fields(items, flex_fields): def map_unicef_ids_to_households_unicef_ids(excluded_ids_string): excluded_ids_array = excluded_ids_string.split(",") excluded_ids_array = [excluded_id.strip() for excluded_id in excluded_ids_array] - excluded_household_ids_array = [excluded_id for excluded_id in excluded_ids_array if excluded_id.startswith("HH")] + excluded_household_ids_array = [ + excluded_id + for excluded_id in excluded_ids_array + if excluded_id.startswith("HH") + ] excluded_individuals_ids_array = [ - excluded_id for excluded_id in excluded_ids_array if excluded_id.startswith("IND") + excluded_id + for excluded_id in excluded_ids_array + if excluded_id.startswith("IND") ] from hct_mis_api.apps.household.models import Household diff --git a/backend/hct_mis_api/apps/geo/models.py b/backend/hct_mis_api/apps/geo/models.py index 3aecb4d879..50ccc19ed5 100644 --- a/backend/hct_mis_api/apps/geo/models.py +++ b/backend/hct_mis_api/apps/geo/models.py @@ -2,8 +2,9 @@ # - AreaType # - Area from django.contrib.gis.db import models -from django.contrib.postgres.fields import CICharField, JSONField -from django.utils.translation import ugettext_lazy as _ +from django.contrib.postgres.fields import CICharField +from django.db.models import JSONField +from django.utils.translation import gettext_lazy as _ from hct_mis_api.apps.utils.models import TimeStampedUUIDModel from mptt.fields import TreeForeignKey @@ -93,7 +94,9 @@ class Area(MPTTModel, UpgradeModel, TimeStampedUUIDModel): on_delete=models.CASCADE, verbose_name=_("Parent"), ) - p_code = models.CharField(max_length=32, blank=True, null=True, verbose_name="P Code") + p_code = models.CharField( + max_length=32, blank=True, null=True, verbose_name="P Code" + ) area_type = models.ForeignKey(AreaType, on_delete=models.CASCADE) geom = models.MultiPolygonField(null=True, blank=True) @@ -118,4 +121,10 @@ def get_admin_areas_as_choices(cls, admin_level, business_area=None): if business_area is not None: queryset.filter(area_type__country__business_areas=business_area) queryset = queryset.order_by("name") - return [{"label": {"English(EN)": f"{area.name}-{area.p_code}"}, "value": area.p_code} for area in queryset] + return [ + { + "label": {"English(EN)": f"{area.name}-{area.p_code}"}, + "value": area.p_code, + } + for area in queryset + ] diff --git a/backend/hct_mis_api/apps/grievance/models.py b/backend/hct_mis_api/apps/grievance/models.py index 3cd86e73c5..fbe3e87ead 100644 --- a/backend/hct_mis_api/apps/grievance/models.py +++ 
b/backend/hct_mis_api/apps/grievance/models.py @@ -2,11 +2,10 @@ from itertools import chain from django.conf import settings -from django.contrib.postgres.fields import JSONField from django.core.exceptions import ValidationError from django.db import models -from django.db.models import Q -from django.utils.translation import ugettext_lazy as _ +from django.db.models import JSONField, Q +from django.utils.translation import gettext_lazy as _ from hct_mis_api.apps.activity_log.utils import create_mapping_dict from hct_mis_api.apps.core.utils import choices_to_dict @@ -24,17 +23,49 @@ def belong_household(self, household): # TicketDeleteIndividualDetails, TicketAddIndividualDetails, TicketIndividualDataUpdateDetails, # TicketHouseholdDataUpdateDetails, TicketSensitiveDetails, TicketComplaintDetails, TicketNote] return chain( - (TicketReferralDetails.objects.filter(Q(individual__in=individuals) | Q(household=household))), - (TicketNegativeFeedbackDetails.objects.filter(Q(individual__in=individuals) | Q(household=household))), - (TicketPositiveFeedbackDetails.objects.filter(Q(individual__in=individuals) | Q(household=household))), - (TicketNeedsAdjudicationDetails.objects.filter(selected_individual__in=individuals)), - (TicketSystemFlaggingDetails.objects.filter(golden_records_individual__in=individuals)), + ( + TicketReferralDetails.objects.filter( + Q(individual__in=individuals) | Q(household=household) + ) + ), + ( + TicketNegativeFeedbackDetails.objects.filter( + Q(individual__in=individuals) | Q(household=household) + ) + ), + ( + TicketPositiveFeedbackDetails.objects.filter( + Q(individual__in=individuals) | Q(household=household) + ) + ), + ( + TicketNeedsAdjudicationDetails.objects.filter( + selected_individual__in=individuals + ) + ), + ( + TicketSystemFlaggingDetails.objects.filter( + golden_records_individual__in=individuals + ) + ), (TicketDeleteIndividualDetails.objects.filter(individual__in=individuals)), (TicketAddIndividualDetails.objects.filter(household=household)), - (TicketIndividualDataUpdateDetails.objects.filter(individual__in=individuals)), + ( + TicketIndividualDataUpdateDetails.objects.filter( + individual__in=individuals + ) + ), (TicketHouseholdDataUpdateDetails.objects.filter(household=household)), - (TicketSensitiveDetails.objects.filter(Q(individual__in=individuals) | Q(household=household))), - (TicketComplaintDetails.objects.filter(Q(individual__in=individuals) | Q(household=household))), + ( + TicketSensitiveDetails.objects.filter( + Q(individual__in=individuals) | Q(household=household) + ) + ), + ( + TicketComplaintDetails.objects.filter( + Q(individual__in=individuals) | Q(household=household) + ) + ), ) @@ -128,20 +159,32 @@ class GrievanceTicket(TimeStampedUUIDModel, ConcurrencyModel): }, CATEGORY_SENSITIVE_GRIEVANCE: { ISSUE_TYPE_DATA_BREACH: _("Data breach"), - ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: _("Bribery, corruption or kickback"), + ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: _( + "Bribery, corruption or kickback" + ), ISSUE_TYPE_FRAUD_FORGERY: _("Fraud and forgery"), - ISSUE_TYPE_FRAUD_MISUSE: _("Fraud involving misuse of programme funds by third party"), + ISSUE_TYPE_FRAUD_MISUSE: _( + "Fraud involving misuse of programme funds by third party" + ), ISSUE_TYPE_HARASSMENT: _("Harassment and abuse of authority"), ISSUE_TYPE_INAPPROPRIATE_STAFF_CONDUCT: _("Inappropriate staff conduct"), - ISSUE_TYPE_UNAUTHORIZED_USE: _("Unauthorized use, misuse or waste of UNICEF property or funds"), + ISSUE_TYPE_UNAUTHORIZED_USE: _( + "Unauthorized use, misuse or 
waste of UNICEF property or funds" + ), ISSUE_TYPE_CONFLICT_OF_INTEREST: _("Conflict of interest"), ISSUE_TYPE_GROSS_MISMANAGEMENT: _("Gross mismanagement"), ISSUE_TYPE_PERSONAL_DISPUTES: _("Personal disputes"), - ISSUE_TYPE_SEXUAL_HARASSMENT: _("Sexual harassment and sexual exploitation"), + ISSUE_TYPE_SEXUAL_HARASSMENT: _( + "Sexual harassment and sexual exploitation" + ), ISSUE_TYPE_MISCELLANEOUS: _("Miscellaneous"), }, } - ALL_ISSUE_TYPES = [choice for choices_group in ISSUE_TYPES_CHOICES.values() for choice in choices_group.items()] + ALL_ISSUE_TYPES = [ + choice + for choices_group in ISSUE_TYPES_CHOICES.values() + for choice in choices_group.items() + ] STATUS_CHOICES = ( (STATUS_NEW, _("New")), (STATUS_ASSIGNED, _("Assigned")), @@ -194,7 +237,10 @@ class GrievanceTicket(TimeStampedUUIDModel, ConcurrencyModel): "individual": "individual", "household": "household", }, - "individual_data_update_ticket_details": {"individual": "individual", "household": "individual__household"}, + "individual_data_update_ticket_details": { + "individual": "individual", + "household": "individual__household", + }, "add_individual_ticket_details": { "household": "household", }, @@ -305,7 +351,9 @@ class GrievanceTicket(TimeStampedUUIDModel, ConcurrencyModel): blank=True, verbose_name=_("Assigned to"), ) - status = models.IntegerField(verbose_name=_("Status"), choices=STATUS_CHOICES, default=STATUS_NEW) + status = models.IntegerField( + verbose_name=_("Status"), choices=STATUS_CHOICES, default=STATUS_NEW + ) category = models.IntegerField(verbose_name=_("Category"), choices=CATEGORY_CHOICES) issue_type = models.IntegerField(verbose_name=_("Type"), null=True, blank=True) description = models.TextField( @@ -313,17 +361,28 @@ class GrievanceTicket(TimeStampedUUIDModel, ConcurrencyModel): blank=True, help_text=_("The content of the customers query."), ) - admin2 = models.ForeignKey("core.AdminArea", null=True, blank=True, on_delete=models.SET_NULL) - admin2_new = models.ForeignKey("geo.Area", null=True, blank=True, on_delete=models.SET_NULL) + admin2 = models.ForeignKey( + "core.AdminArea", null=True, blank=True, on_delete=models.SET_NULL + ) + admin2_new = models.ForeignKey( + "geo.Area", null=True, blank=True, on_delete=models.SET_NULL + ) area = models.CharField(max_length=250, blank=True) language = models.TextField(blank=True) consent = models.BooleanField(default=True) - business_area = models.ForeignKey("core.BusinessArea", related_name="tickets", on_delete=models.CASCADE) + business_area = models.ForeignKey( + "core.BusinessArea", related_name="tickets", on_delete=models.CASCADE + ) linked_tickets = models.ManyToManyField( - to="GrievanceTicket", through="GrievanceTicketThrough", related_name="linked_tickets_related" + to="GrievanceTicket", + through="GrievanceTicketThrough", + related_name="linked_tickets_related", ) registration_data_import = models.ForeignKey( - "registration_data.RegistrationDataImport", null=True, blank=True, on_delete=models.CASCADE + "registration_data.RegistrationDataImport", + null=True, + blank=True, + on_delete=models.CASCADE, ) unicef_id = models.CharField(max_length=250, blank=True, default="") extras = JSONField(blank=True, default=dict) @@ -332,7 +391,9 @@ class GrievanceTicket(TimeStampedUUIDModel, ConcurrencyModel): @property def related_tickets(self): - combined_related_tickets = (self.linked_tickets.all() | self.linked_tickets_related.all()).distinct() + combined_related_tickets = ( + self.linked_tickets.all() | self.linked_tickets_related.all() + ).distinct() yield 
from combined_related_tickets @property @@ -380,11 +441,19 @@ class Meta: def clean(self): issue_types = self.ISSUE_TYPES_CHOICES.get(self.category) should_contain_issue_types = bool(issue_types) - has_invalid_issue_type = should_contain_issue_types is True and self.issue_type not in issue_types - has_issue_type_for_category_without_issue_types = bool(should_contain_issue_types is False and self.issue_type) + has_invalid_issue_type = ( + should_contain_issue_types is True and self.issue_type not in issue_types + ) + has_issue_type_for_category_without_issue_types = bool( + should_contain_issue_types is False and self.issue_type + ) if has_invalid_issue_type or has_issue_type_for_category_without_issue_types: - logger.error(f"Invalid issue type {self.issue_type} for selected category {self.category}") - raise ValidationError({"issue_type": "Invalid issue type for selected category"}) + logger.error( + f"Invalid issue type {self.issue_type} for selected category {self.category}" + ) + raise ValidationError( + {"issue_type": "Invalid issue type for selected category"} + ) def save(self, *args, **kwargs): self.full_clean() @@ -396,10 +465,14 @@ def __str__(self): class GrievanceTicketThrough(TimeStampedUUIDModel): main_ticket = models.ForeignKey( - "GrievanceTicket", on_delete=models.CASCADE, related_name="grievance_tickets_through_main" + "GrievanceTicket", + on_delete=models.CASCADE, + related_name="grievance_tickets_through_main", ) linked_ticket = models.ForeignKey( - "GrievanceTicket", on_delete=models.CASCADE, related_name="grievance_tickets_through_linked" + "GrievanceTicket", + on_delete=models.CASCADE, + related_name="grievance_tickets_through_linked", ) @@ -408,7 +481,11 @@ class TicketNote(TimeStampedUUIDModel): verbose_name=_("Description"), help_text=_("The content of the customers query."), ) - ticket = models.ForeignKey("grievance.GrievanceTicket", related_name="ticket_notes", on_delete=models.CASCADE) + ticket = models.ForeignKey( + "grievance.GrievanceTicket", + related_name="ticket_notes", + on_delete=models.CASCADE, + ) created_by = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, @@ -421,7 +498,9 @@ class TicketNote(TimeStampedUUIDModel): class TicketComplaintDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="complaint_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="complaint_ticket_details", + on_delete=models.CASCADE, ) payment_record = models.ForeignKey( "payment.PaymentRecord", @@ -445,7 +524,9 @@ class TicketComplaintDetails(TimeStampedUUIDModel): class TicketSensitiveDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="sensitive_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="sensitive_ticket_details", + on_delete=models.CASCADE, ) payment_record = models.ForeignKey( "payment.PaymentRecord", @@ -469,7 +550,9 @@ class TicketSensitiveDetails(TimeStampedUUIDModel): class TicketHouseholdDataUpdateDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="household_data_update_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="household_data_update_ticket_details", + on_delete=models.CASCADE, ) household = models.ForeignKey( "household.Household", @@ -482,7 +565,9 @@ class TicketHouseholdDataUpdateDetails(TimeStampedUUIDModel): class 
TicketIndividualDataUpdateDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="individual_data_update_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="individual_data_update_ticket_details", + on_delete=models.CASCADE, ) individual = models.ForeignKey( "household.Individual", @@ -500,7 +585,9 @@ def household(self): class TicketAddIndividualDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="add_individual_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="add_individual_ticket_details", + on_delete=models.CASCADE, ) household = models.ForeignKey( "household.Household", @@ -514,7 +601,9 @@ class TicketAddIndividualDetails(TimeStampedUUIDModel): class TicketDeleteIndividualDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="delete_individual_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="delete_individual_ticket_details", + on_delete=models.CASCADE, ) individual = models.ForeignKey( "household.Individual", @@ -532,11 +621,17 @@ def household(self): class TicketSystemFlaggingDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="system_flagging_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="system_flagging_ticket_details", + on_delete=models.CASCADE, + ) + golden_records_individual = models.ForeignKey( + "household.Individual", on_delete=models.CASCADE ) - golden_records_individual = models.ForeignKey("household.Individual", on_delete=models.CASCADE) sanction_list_individual = models.ForeignKey( - "sanction_list.SanctionListIndividual", related_name="+", on_delete=models.CASCADE + "sanction_list.SanctionListIndividual", + related_name="+", + on_delete=models.CASCADE, ) approve_status = models.BooleanField(default=False) role_reassign_data = JSONField(default=dict) @@ -544,10 +639,16 @@ class TicketSystemFlaggingDetails(TimeStampedUUIDModel): class TicketNeedsAdjudicationDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="needs_adjudication_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="needs_adjudication_ticket_details", + on_delete=models.CASCADE, + ) + golden_records_individual = models.ForeignKey( + "household.Individual", related_name="+", on_delete=models.CASCADE + ) + possible_duplicate = models.ForeignKey( + "household.Individual", related_name="+", on_delete=models.CASCADE ) - golden_records_individual = models.ForeignKey("household.Individual", related_name="+", on_delete=models.CASCADE) - possible_duplicate = models.ForeignKey("household.Individual", related_name="+", on_delete=models.CASCADE) selected_individual = models.ForeignKey( "household.Individual", null=True, related_name="+", on_delete=models.CASCADE ) @@ -556,16 +657,26 @@ class TicketNeedsAdjudicationDetails(TimeStampedUUIDModel): @property def has_duplicated_document(self): - documents1 = [f"{x.document_number}--{x.type_id}" for x in self.golden_records_individual.documents.all()] - documents2 = [f"{x.document_number}--{x.type_id}" for x in self.possible_duplicate.documents.all()] + documents1 = [ + f"{x.document_number}--{x.type_id}" + for x in self.golden_records_individual.documents.all() + ] + documents2 = [ + f"{x.document_number}--{x.type_id}" + 
for x in self.possible_duplicate.documents.all() + ] return bool(set(documents1) & set(documents2)) class TicketPaymentVerificationDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="payment_verification_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="payment_verification_ticket_details", + on_delete=models.CASCADE, + ) + payment_verifications = models.ManyToManyField( + "payment.PaymentVerification", related_name="ticket_details" ) - payment_verifications = models.ManyToManyField("payment.PaymentVerification", related_name="ticket_details") payment_verification_status = models.CharField( max_length=50, choices=PaymentVerification.STATUS_CHOICES, @@ -574,7 +685,9 @@ class TicketPaymentVerificationDetails(TimeStampedUUIDModel): class TicketPositiveFeedbackDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="positive_feedback_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="positive_feedback_ticket_details", + on_delete=models.CASCADE, ) household = models.ForeignKey( "household.Household", @@ -592,7 +705,9 @@ class TicketPositiveFeedbackDetails(TimeStampedUUIDModel): class TicketNegativeFeedbackDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="negative_feedback_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="negative_feedback_ticket_details", + on_delete=models.CASCADE, ) household = models.ForeignKey( "household.Household", @@ -610,7 +725,9 @@ class TicketNegativeFeedbackDetails(TimeStampedUUIDModel): class TicketReferralDetails(TimeStampedUUIDModel): ticket = models.OneToOneField( - "grievance.GrievanceTicket", related_name="referral_ticket_details", on_delete=models.CASCADE + "grievance.GrievanceTicket", + related_name="referral_ticket_details", + on_delete=models.CASCADE, ) household = models.ForeignKey( "household.Household", diff --git a/backend/hct_mis_api/apps/grievance/mutations.py b/backend/hct_mis_api/apps/grievance/mutations.py index 9b775ddd76..8898f34105 100644 --- a/backend/hct_mis_api/apps/grievance/mutations.py +++ b/backend/hct_mis_api/apps/grievance/mutations.py @@ -206,16 +206,37 @@ class CreateGrievanceTicketMutation(PermissionMutation): ], }, GrievanceTicket.ISSUE_TYPE_DATA_BREACH: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: {"required": [], "not_allowed": []}, + GrievanceTicket.ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: { + "required": [], + "not_allowed": [], + }, GrievanceTicket.ISSUE_TYPE_FRAUD_FORGERY: {"required": [], "not_allowed": []}, GrievanceTicket.ISSUE_TYPE_FRAUD_MISUSE: {"required": [], "not_allowed": []}, GrievanceTicket.ISSUE_TYPE_HARASSMENT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_INAPPROPRIATE_STAFF_CONDUCT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_UNAUTHORIZED_USE: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_CONFLICT_OF_INTEREST: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_GROSS_MISMANAGEMENT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_PERSONAL_DISPUTES: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_SEXUAL_HARASSMENT: {"required": [], "not_allowed": []}, + GrievanceTicket.ISSUE_TYPE_INAPPROPRIATE_STAFF_CONDUCT: { + "required": [], + "not_allowed": [], + }, + 
GrievanceTicket.ISSUE_TYPE_UNAUTHORIZED_USE: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_CONFLICT_OF_INTEREST: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_GROSS_MISMANAGEMENT: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_PERSONAL_DISPUTES: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_SEXUAL_HARASSMENT: { + "required": [], + "not_allowed": [], + }, GrievanceTicket.ISSUE_TYPE_MISCELLANEOUS: {"required": [], "not_allowed": []}, } @@ -246,9 +267,17 @@ def mutate(cls, root, info, input, **kwargs): save_extra_method = save_extra_methods.get(category) grievances = [grievance_ticket] if save_extra_method: - grievances = save_extra_method(root, info, input, grievance_ticket, extras, **kwargs) + grievances = save_extra_method( + root, info, input, grievance_ticket, extras, **kwargs + ) for grievance in grievances: - log_create(GrievanceTicket.ACTIVITY_LOG_MAPPING, "business_area", info.context.user, None, grievance) + log_create( + GrievanceTicket.ACTIVITY_LOG_MAPPING, + "business_area", + info.context.user, + None, + grievance, + ) return cls(grievance_tickets=grievances) @classmethod @@ -257,10 +286,14 @@ def save_basic_data(cls, root, info, input, **kwargs): user = info.context.user assigned_to_id = decode_id_string(arg("assigned_to")) linked_tickets_encoded_ids = arg("linked_tickets", []) - linked_tickets = [decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids] + linked_tickets = [ + decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids + ] business_area_slug = arg("business_area") extras = arg("extras", {}) - remove_parsed_data_fields(input, ("linked_tickets", "extras", "business_area", "assigned_to")) + remove_parsed_data_fields( + input, ("linked_tickets", "extras", "business_area", "assigned_to") + ) admin = input.pop("admin", None) admin_object = None admin_object_new = None @@ -280,7 +313,9 @@ def save_basic_data(cls, root, info, input, **kwargs): status=GrievanceTicket.STATUS_ASSIGNED, ) GrievanceNotification.send_all_notifications( - GrievanceNotification.prepare_notification_for_ticket_creation(grievance_ticket) + GrievanceNotification.prepare_notification_for_ticket_creation( + grievance_ticket + ) ) grievance_ticket.linked_tickets.set(linked_tickets) return grievance_ticket, extras @@ -292,28 +327,61 @@ class UpdateGrievanceTicketMutation(PermissionMutation): EXTRAS_OPTIONS = { GrievanceTicket.ISSUE_TYPE_HOUSEHOLD_DATA_CHANGE_DATA_UPDATE: { "required": ["extras.household_data_update_issue_type_extras"], - "not_allowed": ["individual_data_update_issue_type_extras", "add_individual_issue_type_extras"], + "not_allowed": [ + "individual_data_update_issue_type_extras", + "add_individual_issue_type_extras", + ], }, GrievanceTicket.ISSUE_TYPE_INDIVIDUAL_DATA_CHANGE_DATA_UPDATE: { "required": ["extras.individual_data_update_issue_type_extras"], - "not_allowed": ["household_data_update_issue_type_extras", "add_individual_issue_type_extras"], + "not_allowed": [ + "household_data_update_issue_type_extras", + "add_individual_issue_type_extras", + ], }, GrievanceTicket.ISSUE_TYPE_DATA_CHANGE_ADD_INDIVIDUAL: { "required": ["extras.add_individual_issue_type_extras"], - "not_allowed": ["household_data_update_issue_type_extras", "individual_data_update_issue_type_extras"], + "not_allowed": [ + "household_data_update_issue_type_extras", + "individual_data_update_issue_type_extras", + ], + }, + 
GrievanceTicket.ISSUE_TYPE_DATA_CHANGE_DELETE_INDIVIDUAL: { + "required": [], + "not_allowed": [], }, - GrievanceTicket.ISSUE_TYPE_DATA_CHANGE_DELETE_INDIVIDUAL: {"required": [], "not_allowed": []}, GrievanceTicket.ISSUE_TYPE_DATA_BREACH: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: {"required": [], "not_allowed": []}, + GrievanceTicket.ISSUE_TYPE_BRIBERY_CORRUPTION_KICKBACK: { + "required": [], + "not_allowed": [], + }, GrievanceTicket.ISSUE_TYPE_FRAUD_FORGERY: {"required": [], "not_allowed": []}, GrievanceTicket.ISSUE_TYPE_FRAUD_MISUSE: {"required": [], "not_allowed": []}, GrievanceTicket.ISSUE_TYPE_HARASSMENT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_INAPPROPRIATE_STAFF_CONDUCT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_UNAUTHORIZED_USE: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_CONFLICT_OF_INTEREST: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_GROSS_MISMANAGEMENT: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_PERSONAL_DISPUTES: {"required": [], "not_allowed": []}, - GrievanceTicket.ISSUE_TYPE_SEXUAL_HARASSMENT: {"required": [], "not_allowed": []}, + GrievanceTicket.ISSUE_TYPE_INAPPROPRIATE_STAFF_CONDUCT: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_UNAUTHORIZED_USE: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_CONFLICT_OF_INTEREST: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_GROSS_MISMANAGEMENT: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_PERSONAL_DISPUTES: { + "required": [], + "not_allowed": [], + }, + GrievanceTicket.ISSUE_TYPE_SEXUAL_HARASSMENT: { + "required": [], + "not_allowed": [], + }, GrievanceTicket.ISSUE_TYPE_MISCELLANEOUS: {"required": [], "not_allowed": []}, } @@ -326,15 +394,23 @@ class Arguments: @transaction.atomic def mutate(cls, root, info, input, **kwargs): arg = lambda name, default=None: input.get(name, default) - old_grievance_ticket = get_object_or_404(GrievanceTicket, id=decode_id_string(arg("ticket_id"))) - grievance_ticket = get_object_or_404(GrievanceTicket, id=decode_id_string(arg("ticket_id"))) + old_grievance_ticket = get_object_or_404( + GrievanceTicket, id=decode_id_string(arg("ticket_id")) + ) + grievance_ticket = get_object_or_404( + GrievanceTicket, id=decode_id_string(arg("ticket_id")) + ) household, individual = None, None if arg("household") is not None: - household = get_object_or_404(Household, id=decode_id_string(arg("household"))) + household = get_object_or_404( + Household, id=decode_id_string(arg("household")) + ) if arg("individual") is not None: - individual = get_object_or_404(Individual, id=decode_id_string(arg("individual"))) + individual = get_object_or_404( + Individual, id=decode_id_string(arg("individual")) + ) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) business_area = grievance_ticket.business_area @@ -354,7 +430,9 @@ def mutate(cls, root, info, input, **kwargs): if grievance_ticket.issue_type: verify_required_arguments(input, "issue_type", cls.EXTRAS_OPTIONS) - grievance_ticket, extras = cls.update_basic_data(root, info, input, grievance_ticket, **kwargs) + grievance_ticket, extras = cls.update_basic_data( + root, info, input, grievance_ticket, **kwargs + ) if cls.has_creator_or_owner_permission( info, @@ -372,7 +450,9 @@ def mutate(cls, root, info, input, **kwargs): category = grievance_ticket.category 
update_extra_method = update_extra_methods.get(category) if update_extra_method: - grievance_ticket = update_extra_method(root, info, input, grievance_ticket, extras, **kwargs) + grievance_ticket = update_extra_method( + root, info, input, grievance_ticket, extras, **kwargs + ) update_extra_methods = { GrievanceTicket.CATEGORY_REFERRAL: update_referral_extras, @@ -381,7 +461,9 @@ def mutate(cls, root, info, input, **kwargs): } update_extra_method = update_extra_methods.get(grievance_ticket.category) if update_extra_method: - grievance_ticket = update_extra_method(root, info, input, grievance_ticket, extras, **kwargs) + grievance_ticket = update_extra_method( + root, info, input, grievance_ticket, extras, **kwargs + ) if grievance_ticket.category in [ GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE, @@ -416,7 +498,9 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): arg = lambda name, default=None: input.get(name, default) assigned_to_id = decode_id_string(arg("assigned_to")) linked_tickets_encoded_ids = arg("linked_tickets", []) - linked_tickets = [decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids] + linked_tickets = [ + decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids + ] extras = arg("extras", {}) remove_parsed_data_fields(input, ("linked_tickets", "extras", "assigned_to")) assigned_to = get_object_or_404(get_user_model(), id=assigned_to_id) @@ -426,10 +510,16 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): setattr(grievance_ticket, field, value) if assigned_to != grievance_ticket.assigned_to: - if grievance_ticket.status == GrievanceTicket.STATUS_NEW and grievance_ticket.assigned_to is None: + if ( + grievance_ticket.status == GrievanceTicket.STATUS_NEW + and grievance_ticket.assigned_to is None + ): grievance_ticket.status = GrievanceTicket.STATUS_ASSIGNED grievance_ticket.assigned_to = assigned_to - if grievance_ticket.status in (GrievanceTicket.STATUS_ON_HOLD, GrievanceTicket.STATUS_FOR_APPROVAL): + if grievance_ticket.status in ( + GrievanceTicket.STATUS_ON_HOLD, + GrievanceTicket.STATUS_FOR_APPROVAL, + ): grievance_ticket.status = GrievanceTicket.STATUS_IN_PROGRESS else: if grievance_ticket.status == GrievanceTicket.STATUS_FOR_APPROVAL: @@ -449,7 +539,9 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): and grievance_ticket.status == GrievanceTicket.STATUS_IN_PROGRESS ): back_to_in_progress_notification = GrievanceNotification( - grievance_ticket, GrievanceNotification.ACTION_SEND_BACK_TO_IN_PROGRESS, approver=info.context.user + grievance_ticket, + GrievanceNotification.ACTION_SEND_BACK_TO_IN_PROGRESS, + approver=info.context.user, ) back_to_in_progress_notification.send_email_notification() if old_assigned_to != grievance_ticket.assigned_to: @@ -463,9 +555,18 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): POSSIBLE_STATUS_FLOW = { GrievanceTicket.STATUS_NEW: (GrievanceTicket.STATUS_ASSIGNED,), GrievanceTicket.STATUS_ASSIGNED: (GrievanceTicket.STATUS_IN_PROGRESS,), - GrievanceTicket.STATUS_IN_PROGRESS: (GrievanceTicket.STATUS_ON_HOLD, GrievanceTicket.STATUS_FOR_APPROVAL), - GrievanceTicket.STATUS_ON_HOLD: (GrievanceTicket.STATUS_IN_PROGRESS, GrievanceTicket.STATUS_FOR_APPROVAL), - GrievanceTicket.STATUS_FOR_APPROVAL: (GrievanceTicket.STATUS_IN_PROGRESS, GrievanceTicket.STATUS_CLOSED), + GrievanceTicket.STATUS_IN_PROGRESS: ( + GrievanceTicket.STATUS_ON_HOLD, + GrievanceTicket.STATUS_FOR_APPROVAL, + ), + 
GrievanceTicket.STATUS_ON_HOLD: ( + GrievanceTicket.STATUS_IN_PROGRESS, + GrievanceTicket.STATUS_FOR_APPROVAL, + ), + GrievanceTicket.STATUS_FOR_APPROVAL: ( + GrievanceTicket.STATUS_IN_PROGRESS, + GrievanceTicket.STATUS_CLOSED, + ), GrievanceTicket.STATUS_CLOSED: (), } POSSIBLE_FEEDBACK_STATUS_FLOW = { @@ -481,7 +582,10 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): GrievanceTicket.STATUS_FOR_APPROVAL, GrievanceTicket.STATUS_CLOSED, ), - GrievanceTicket.STATUS_FOR_APPROVAL: (GrievanceTicket.STATUS_IN_PROGRESS, GrievanceTicket.STATUS_CLOSED), + GrievanceTicket.STATUS_FOR_APPROVAL: ( + GrievanceTicket.STATUS_IN_PROGRESS, + GrievanceTicket.STATUS_CLOSED, + ), GrievanceTicket.STATUS_CLOSED: (), } @@ -519,7 +623,7 @@ class GrievanceStatusChangeMutation(PermissionMutation): GrievanceTicket.CATEGORY_SYSTEM_FLAGGING: close_system_flagging_ticket, } - MOVE_TO_STATUS_PERMISSION_MAPPING: Dict[str, Dict[Union[str, int], List[Enum]]] = { + MOVE_TO_STATUS_PERMISSION_MAPPING: dict[str, dict[Union[str, int], list[Enum]]] = { GrievanceTicket.STATUS_ASSIGNED: { "any": [ Permissions.GRIEVANCES_UPDATE, @@ -572,7 +676,9 @@ class Arguments: @classmethod def get_close_function(cls, category, issue_type): - function_or_nested_issue_types = cls.CATEGORY_ISSUE_TYPE_TO_CLOSE_FUNCTION_MAPPING.get(category) + function_or_nested_issue_types = ( + cls.CATEGORY_ISSUE_TYPE_TO_CLOSE_FUNCTION_MAPPING.get(category) + ) if isinstance(function_or_nested_issue_types, dict) and issue_type: return function_or_nested_issue_types.get(issue_type) return function_or_nested_issue_types @@ -582,7 +688,9 @@ def get_close_function(cls, category, issue_type): @transaction.atomic def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): grievance_ticket_id = decode_id_string(grievance_ticket_id) - old_grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) + old_grievance_ticket = get_object_or_404( + GrievanceTicket, id=grievance_ticket_id + ) grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) if grievance_ticket.status == status: @@ -591,9 +699,13 @@ def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): if cls.MOVE_TO_STATUS_PERMISSION_MAPPING.get(status): if cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("feedback"): if grievance_ticket.is_feedback: - permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("feedback") + permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[ + status + ].get("feedback") else: - permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("any") + permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[ + status + ].get("any") else: permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get( grievance_ticket.status @@ -616,11 +728,18 @@ def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): logger.error("New status is incorrect") raise GraphQLError("New status is incorrect") if status == GrievanceTicket.STATUS_CLOSED: - close_function = cls.get_close_function(grievance_ticket.category, grievance_ticket.issue_type) + close_function = cls.get_close_function( + grievance_ticket.category, grievance_ticket.issue_type + ) close_function(grievance_ticket, info) grievance_ticket.refresh_from_db() - if status == GrievanceTicket.STATUS_ASSIGNED and not grievance_ticket.assigned_to: - cls.has_permission(info, Permissions.GRIEVANCE_ASSIGN, grievance_ticket.business_area) + if ( 
+ status == GrievanceTicket.STATUS_ASSIGNED + and not grievance_ticket.assigned_to + ): + cls.has_permission( + info, Permissions.GRIEVANCE_ASSIGN, grievance_ticket.business_area + ) grievance_ticket.assigned_to = info.context.user grievance_ticket.status = status grievance_ticket.save() @@ -645,7 +764,9 @@ def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): and grievance_ticket.status == GrievanceTicket.STATUS_IN_PROGRESS ): back_to_in_progress_notification = GrievanceNotification( - grievance_ticket, GrievanceNotification.ACTION_SEND_BACK_TO_IN_PROGRESS, approver=info.context.user + grievance_ticket, + GrievanceNotification.ACTION_SEND_BACK_TO_IN_PROGRESS, + approver=info.context.user, ) back_to_in_progress_notification.send_email_notification() if old_grievance_ticket.assigned_to != grievance_ticket.assigned_to: @@ -683,9 +804,14 @@ def mutate(cls, root, info, note_input, **kwargs): description = note_input["description"] created_by = info.context.user - ticket_note = TicketNote.objects.create(ticket=grievance_ticket, description=description, created_by=created_by) + ticket_note = TicketNote.objects.create( + ticket=grievance_ticket, description=description, created_by=created_by + ) notification = GrievanceNotification( - grievance_ticket, GrievanceNotification.ACTION_NOTES_ADDED, created_by=created_by, ticket_note=ticket_note + grievance_ticket, + GrievanceNotification.ACTION_NOTES_ADDED, + created_by=created_by, + ticket_note=ticket_note, ) notification.send_email_notification() return cls(grievance_ticket_note=ticket_note) @@ -742,11 +868,17 @@ def mutate( ) cls.verify_approve_data(individual_approve_data) cls.verify_approve_data(flex_fields_approve_data) - individual_approve_data = {to_snake_case(key): value for key, value in individual_approve_data.items()} + individual_approve_data = { + to_snake_case(key): value for key, value in individual_approve_data.items() + } individual_data_details = grievance_ticket.individual_data_update_ticket_details individual_data = individual_data_details.individual_data - cls.verify_approve_data_against_object_data(individual_data, individual_approve_data) - cls.verify_approve_data_against_object_data(individual_data.get("flex_fields"), flex_fields_approve_data) + cls.verify_approve_data_against_object_data( + individual_data, individual_approve_data + ) + cls.verify_approve_data_against_object_data( + individual_data.get("flex_fields"), flex_fields_approve_data + ) documents_mapping = { "documents": approved_documents_to_create, @@ -762,12 +894,14 @@ def mutate( if field_name in documents_mapping: for index, document_data in enumerate(individual_data[field_name]): approved_documents_indexes = documents_mapping.get(field_name, []) - document_data["approve_status"] = index in approved_documents_indexes + document_data["approve_status"] = ( + index in approved_documents_indexes + ) elif field_name == "flex_fields": for flex_field_name in item.keys(): - individual_data["flex_fields"][flex_field_name]["approve_status"] = flex_fields_approve_data.get( - flex_field_name - ) + individual_data["flex_fields"][flex_field_name][ + "approve_status" + ] = flex_fields_approve_data.get(flex_field_name) elif field_to_approve: individual_data[field_name]["approve_status"] = True else: @@ -796,7 +930,15 @@ class Arguments: @classmethod @is_authenticated @transaction.atomic - def mutate(cls, root, info, grievance_ticket_id, household_approve_data, flex_fields_approve_data, **kwargs): + def mutate( + cls, + root, + info, + grievance_ticket_id, + 
household_approve_data, + flex_fields_approve_data, + **kwargs, + ): grievance_ticket_id = decode_id_string(grievance_ticket_id) grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) @@ -811,18 +953,24 @@ def mutate(cls, root, info, grievance_ticket_id, household_approve_data, flex_fi ) cls.verify_approve_data(household_approve_data) cls.verify_approve_data(flex_fields_approve_data) - household_approve_data = {to_snake_case(key): value for key, value in household_approve_data.items()} + household_approve_data = { + to_snake_case(key): value for key, value in household_approve_data.items() + } household_data_details = grievance_ticket.household_data_update_ticket_details household_data = household_data_details.household_data - cls.verify_approve_data_against_object_data(household_data, household_approve_data) - cls.verify_approve_data_against_object_data(household_data.get("flex_fields"), flex_fields_approve_data) + cls.verify_approve_data_against_object_data( + household_data, household_approve_data + ) + cls.verify_approve_data_against_object_data( + household_data.get("flex_fields"), flex_fields_approve_data + ) for field_name, item in household_data.items(): if field_name == "flex_fields": for flex_field_name in item.keys(): - household_data["flex_fields"][flex_field_name]["approve_status"] = flex_fields_approve_data.get( - flex_field_name - ) + household_data["flex_fields"][flex_field_name][ + "approve_status" + ] = flex_fields_approve_data.get(flex_field_name) elif household_approve_data.get(field_name): household_data[field_name]["approve_status"] = True else: @@ -897,17 +1045,28 @@ class Arguments: @classmethod def verify_role_choices(cls, role): if role not in [ROLE_PRIMARY, ROLE_ALTERNATE, HEAD]: - logger.error("Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD") - raise GraphQLError("Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD") + logger.error( + "Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD" + ) + raise GraphQLError( + "Provided role is invalid! 
Please provide one of those: PRIMARY, ALTERNATE, HEAD" + ) @classmethod def verify_if_role_exists(cls, household, current_individual, role): if role == HEAD: if household.head_of_household.id != current_individual.id: logger.error("This individual is not a head of provided household") - raise GraphQLError("This individual is not a head of provided household") + raise GraphQLError( + "This individual is not a head of provided household" + ) else: - get_object_or_404(IndividualRoleInHousehold, individual=current_individual, household=household, role=role) + get_object_or_404( + IndividualRoleInHousehold, + individual=current_individual, + household=household, + role=role, + ) @classmethod @is_authenticated @@ -930,9 +1089,13 @@ def mutate( grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) household = get_object_or_404(Household, id=decoded_household_id) - check_concurrency_version_in_mutation(kwargs.get("household_version"), household) + check_concurrency_version_in_mutation( + kwargs.get("household_version"), household + ) individual = get_object_or_404(Individual, id=decoded_individual_id) - check_concurrency_version_in_mutation(kwargs.get("individual_version"), individual) + check_concurrency_version_in_mutation( + kwargs.get("individual_version"), individual + ) ticket_details = grievance_ticket.ticket_details if grievance_ticket.category == GrievanceTicket.CATEGORY_NEEDS_ADJUDICATION: @@ -947,7 +1110,10 @@ def mutate( role_data_key = role else: role_object = get_object_or_404( - IndividualRoleInHousehold, individual=ticket_individual, household=household, role=role + IndividualRoleInHousehold, + individual=ticket_individual, + household=household, + role=role, ) role_data_key = str(role_object.id) @@ -992,11 +1158,20 @@ def mutate(cls, root, info, grievance_ticket_id, **kwargs): if selected_individual_id: decoded_selected_individual_id = decode_id_string(selected_individual_id) - selected_individual = get_object_or_404(Individual, id=decoded_selected_individual_id) + selected_individual = get_object_or_404( + Individual, id=decoded_selected_individual_id + ) - if selected_individual not in (ticket_details.golden_records_individual, ticket_details.possible_duplicate): - logger.error("The selected individual is not valid, must be one of those attached to the ticket") - raise GraphQLError("The selected individual is not valid, must be one of those attached to the ticket") + if selected_individual not in ( + ticket_details.golden_records_individual, + ticket_details.possible_duplicate, + ): + logger.error( + "The selected individual is not valid, must be one of those attached to the ticket" + ) + raise GraphQLError( + "The selected individual is not valid, must be one of those attached to the ticket" + ) ticket_details.selected_individual = selected_individual ticket_details.role_reassign_data = {} diff --git a/backend/hct_mis_api/apps/grievance/mutations_extras/utils.py b/backend/hct_mis_api/apps/grievance/mutations_extras/utils.py index 7621266d99..e3d19dc6a3 100644 --- a/backend/hct_mis_api/apps/grievance/mutations_extras/utils.py +++ b/backend/hct_mis_api/apps/grievance/mutations_extras/utils.py @@ -25,12 +25,16 @@ def handle_role(role, household, individual): ) if role in (ROLE_PRIMARY, ROLE_ALTERNATE) and household: - already_existing_role = IndividualRoleInHousehold.objects.filter(household=household, role=role).first() + already_existing_role = 
IndividualRoleInHousehold.objects.filter( + household=household, role=role + ).first() if already_existing_role: already_existing_role.individual = individual already_existing_role.save() else: - IndividualRoleInHousehold.objects.create(individual=individual, household=household, role=role) + IndividualRoleInHousehold.objects.create( + individual=individual, household=household, role=role + ) def handle_add_document(document, individual): @@ -49,12 +53,20 @@ def handle_add_document(document, individual): photo = photoraw document_type = DocumentType.objects.get(country=country, type=type_name) - document_already_exists = Document.objects.filter(document_number=number, type=document_type).exists() + document_already_exists = Document.objects.filter( + document_number=number, type=document_type + ).exists() if document_already_exists: - logger.error(f"Document with number {number} of type {type_name} for country {country} already exist") - raise GraphQLError(f"Document with number {number} of type {type_name} for country {country} already exist") + logger.error( + f"Document with number {number} of type {type_name} for country {country} already exist" + ) + raise GraphQLError( + f"Document with number {number} of type {type_name} for country {country} already exist" + ) - return Document(document_number=number, individual=individual, type=document_type, photo=photo) + return Document( + document_number=number, individual=individual, type=document_type, photo=photo + ) def handle_edit_document(document_data: dict): @@ -84,11 +96,17 @@ def handle_edit_document(document_data: dict): document_type = DocumentType.objects.get(country=country, type=type_name) document_already_exists = ( - Document.objects.exclude(pk=document_id).filter(document_number=number, type=document_type).exists() + Document.objects.exclude(pk=document_id) + .filter(document_number=number, type=document_type) + .exists() ) if document_already_exists: - logger.error(f"Document with number {number} of type {type_name} for country {country} already exist") - raise GraphQLError(f"Document with number {number} of type {type_name} for country {country} already exist") + logger.error( + f"Document with number {number} of type {type_name} for country {country} already exist" + ) + raise GraphQLError( + f"Document with number {number} of type {type_name} for country {country} already exist" + ) document.document_number = number document.type = document_type @@ -109,13 +127,23 @@ def handle_add_identity(identity, individual): agency_type, _ = Agency.objects.get_or_create( country=country, type=agency_name, - defaults={"country": country, "type": agency_name, "label": f"{country.name} - {agency_name}"}, + defaults={ + "country": country, + "type": agency_name, + "label": f"{country.name} - {agency_name}", + }, ) - identity_already_exists = IndividualIdentity.objects.filter(number=number, agency=agency_type).exists() + identity_already_exists = IndividualIdentity.objects.filter( + number=number, agency=agency_type + ).exists() if identity_already_exists: - logger.error(f"Identity with number {number}, agency: {agency_name} already exist") - raise GraphQLError(f"Identity with number {number}, agency: {agency_name} already exist") + logger.error( + f"Identity with number {number}, agency: {agency_name} already exist" + ) + raise GraphQLError( + f"Identity with number {number}, agency: {agency_name} already exist" + ) return IndividualIdentity(number=number, individual=individual, agency=agency_type) @@ -142,15 +170,25 @@ def 
handle_edit_identity(identity_data: dict): agency_type, _ = Agency.objects.get_or_create( country=country, type=agency_name, - defaults={"country": country, "type": agency_name, "label": f"{country.name} - {agency_name}"}, + defaults={ + "country": country, + "type": agency_name, + "label": f"{country.name} - {agency_name}", + }, ) identity_already_exists = ( - IndividualIdentity.objects.exclude(pk=identity_id).filter(number=number, agency=agency_type).exists() + IndividualIdentity.objects.exclude(pk=identity_id) + .filter(number=number, agency=agency_type) + .exists() ) if identity_already_exists: - logger.error(f"Identity with number {number}, agency: {agency_name} already exist") - raise GraphQLError(f"Identity with number {number}, agency: {agency_name} already exist") + logger.error( + f"Identity with number {number}, agency: {agency_name} already exist" + ) + raise GraphQLError( + f"Identity with number {number}, agency: {agency_name} already exist" + ) identity.number = number identity.agency = agency_type @@ -268,14 +306,18 @@ def prepare_edit_identities(identities): "id": encoded_id, "country": country, "agency": agency, - "individual": encode_id_base64(identity.individual.id, "Individual"), + "individual": encode_id_base64( + identity.individual.id, "Individual" + ), "number": number, }, "previous_value": { "id": encoded_id, "country": identity.agency.country.alpha3, "agency": identity.agency.type, - "individual": encode_id_base64(identity.individual.id, "Individual"), + "individual": encode_id_base64( + identity.individual.id, "Individual" + ), "number": identity.number, }, } @@ -315,8 +357,12 @@ def verify_flex_fields(flex_fields_to_verify, associated_with): from hct_mis_api.apps.core.utils import serialize_flex_attributes if associated_with not in ("households", "individuals"): - logger.error("associated_with argument must be one of ['household', 'individual']") - raise ValueError("associated_with argument must be one of ['household', 'individual']") + logger.error( + "associated_with argument must be one of ['household', 'individual']" + ) + raise ValueError( + "associated_with argument must be one of ['household', 'individual']" + ) all_flex_fields = serialize_flex_attributes().get(associated_with, {}) @@ -326,8 +372,11 @@ def verify_flex_fields(flex_fields_to_verify, associated_with): logger.error(f"{name} is not a correct `flex field") raise ValueError(f"{name} is not a correct `flex field") field_type = flex_field["type"] - field_choices = set(f.get("value") for f in flex_field["choices"]) - if not isinstance(value, FIELD_TYPES_TO_INTERNAL_TYPE[field_type]) or value is None: + field_choices = {f.get("value") for f in flex_field["choices"]} + if ( + not isinstance(value, FIELD_TYPES_TO_INTERNAL_TYPE[field_type]) + or value is None + ): logger.error(f"invalid value type for a field {name}") raise ValueError(f"invalid value type for a field {name}") @@ -347,19 +396,29 @@ def withdraw_individual_and_reassign_roles(ticket_details, individual_to_remove, old_individual = Individual.objects.get(id=individual_to_remove.id) household = reassign_roles_on_disable_individual( - ticket_details.ticket, individual_to_remove, ticket_details.role_reassign_data, info + ticket_details.ticket, + individual_to_remove, + ticket_details.role_reassign_data, + info, ) withdraw_individual(individual_to_remove, info, old_individual, household) -def mark_as_duplicate_individual_and_reassign_roles(ticket_details, individual_to_remove, info, unique_individual): +def 
mark_as_duplicate_individual_and_reassign_roles( + ticket_details, individual_to_remove, info, unique_individual +): from hct_mis_api.apps.household.models import Individual old_individual = Individual.objects.get(id=individual_to_remove.id) household = reassign_roles_on_disable_individual( - ticket_details.ticket, individual_to_remove, ticket_details.role_reassign_data, info + ticket_details.ticket, + individual_to_remove, + ticket_details.role_reassign_data, + info, + ) + mark_as_duplicate_individual( + individual_to_remove, info, old_individual, household, unique_individual ) - mark_as_duplicate_individual(individual_to_remove, info, old_individual, household, unique_individual) def get_data_from_role_data(role_data): @@ -380,7 +439,9 @@ def get_data_from_role_data(role_data): return role_name, old_individual, new_individual, household -def reassign_roles_on_disable_individual(ticket, individual_to_remove, role_reassign_data, info=None): +def reassign_roles_on_disable_individual( + ticket, individual_to_remove, role_reassign_data, info=None +): from django.shortcuts import get_object_or_404 from graphql import GraphQLError @@ -395,13 +456,20 @@ def reassign_roles_on_disable_individual(ticket, individual_to_remove, role_reas roles_to_bulk_update = [] for role_data in role_reassign_data.values(): - role_name, old_new_individual, new_individual, household = get_data_from_role_data(role_data) + ( + role_name, + old_new_individual, + new_individual, + household, + ) = get_data_from_role_data(role_data) if role_name == HEAD: household.head_of_household = new_individual # can be directly saved, because there is always only one head of household to update household.save() - household.individuals.exclude(id=new_individual.id).update(relationship=RELATIONSHIP_UNKNOWN) + household.individuals.exclude(id=new_individual.id).update( + relationship=RELATIONSHIP_UNKNOWN + ) new_individual.relationship = HEAD new_individual.save() if info: @@ -414,34 +482,54 @@ def reassign_roles_on_disable_individual(ticket, individual_to_remove, role_reas ) if role_name == ROLE_ALTERNATE and new_individual.role == ROLE_PRIMARY: - raise GraphQLError("Cannot reassign the role. Selected individual has primary collector role.") + raise GraphQLError( + "Cannot reassign the role. Selected individual has primary collector role." 
+ ) if role_name in (ROLE_PRIMARY, ROLE_ALTERNATE): role = get_object_or_404( - IndividualRoleInHousehold, role=role_name, household=household, individual=individual_to_remove + IndividualRoleInHousehold, + role=role_name, + household=household, + individual=individual_to_remove, ) role.individual = new_individual roles_to_bulk_update.append(role) - primary_roles_count = Counter([role.get("role") for role in role_reassign_data.values()])[ROLE_PRIMARY] + primary_roles_count = Counter( + [role.get("role") for role in role_reassign_data.values()] + )[ROLE_PRIMARY] household_to_remove = individual_to_remove.household - is_one_individual = household_to_remove.individuals.count() == 1 if household_to_remove else False + is_one_individual = ( + household_to_remove.individuals.count() == 1 if household_to_remove else False + ) - if primary_roles_count != individual_to_remove.count_primary_roles() and not is_one_individual: + if ( + primary_roles_count != individual_to_remove.count_primary_roles() + and not is_one_individual + ): logger.error("Ticket cannot be closed, not all roles have been reassigned") - raise GraphQLError("Ticket cannot be closed, not all roles have been reassigned") + raise GraphQLError( + "Ticket cannot be closed, not all roles have been reassigned" + ) if ( all(HEAD not in key for key in role_reassign_data.keys()) and individual_to_remove.is_head() and not is_one_individual ): - logger.error("Ticket cannot be closed head of household has not been reassigned") - raise GraphQLError("Ticket cannot be closed head of household has not been reassigned") + logger.error( + "Ticket cannot be closed head of household has not been reassigned" + ) + raise GraphQLError( + "Ticket cannot be closed head of household has not been reassigned" + ) if roles_to_bulk_update: - IndividualRoleInHousehold.objects.bulk_update(roles_to_bulk_update, ["individual"]) + IndividualRoleInHousehold.objects.bulk_update( + roles_to_bulk_update, ["individual"] + ) return household_to_remove @@ -459,7 +547,12 @@ def reassign_roles_on_update(individual, role_reassign_data, info=None): roles_to_bulk_update = [] for role_data in role_reassign_data.values(): - role_name, old_new_individual, new_individual, household = get_data_from_role_data(role_data) + ( + role_name, + old_new_individual, + new_individual, + household, + ) = get_data_from_role_data(role_data) if role_name == HEAD: household.head_of_household = new_individual @@ -476,32 +569,51 @@ def reassign_roles_on_update(individual, role_reassign_data, info=None): ) if role_name == ROLE_ALTERNATE and new_individual.role == ROLE_PRIMARY: - raise GraphQLError("Cannot reassign the role. Selected individual has primary collector role.") + raise GraphQLError( + "Cannot reassign the role. Selected individual has primary collector role." 
+ ) if role_name in (ROLE_PRIMARY, ROLE_ALTERNATE): role = get_object_or_404( - IndividualRoleInHousehold, role=role_name, household=household, individual=individual + IndividualRoleInHousehold, + role=role_name, + household=household, + individual=individual, ) role.individual = new_individual roles_to_bulk_update.append(role) if roles_to_bulk_update: - IndividualRoleInHousehold.objects.bulk_update(roles_to_bulk_update, ["individual"]) + IndividualRoleInHousehold.objects.bulk_update( + roles_to_bulk_update, ["individual"] + ) -def withdraw_individual(individual_to_remove, info, old_individual_to_remove, removed_individual_household): +def withdraw_individual( + individual_to_remove, info, old_individual_to_remove, removed_individual_household +): individual_to_remove.withdraw() log_and_withdraw_household_if_needed( - individual_to_remove, info, old_individual_to_remove, removed_individual_household + individual_to_remove, + info, + old_individual_to_remove, + removed_individual_household, ) def mark_as_duplicate_individual( - individual_to_remove, info, old_individual_to_remove, removed_individual_household, unique_individual + individual_to_remove, + info, + old_individual_to_remove, + removed_individual_household, + unique_individual, ): individual_to_remove.mark_as_duplicate(unique_individual) log_and_withdraw_household_if_needed( - individual_to_remove, info, old_individual_to_remove, removed_individual_household + individual_to_remove, + info, + old_individual_to_remove, + removed_individual_household, ) @@ -518,7 +630,10 @@ def log_and_withdraw_household_if_needed( individual_to_remove, ) removed_individual_household.refresh_from_db() - if removed_individual_household and removed_individual_household.active_individuals.count() == 0: + if ( + removed_individual_household + and removed_individual_household.active_individuals.count() == 0 + ): removed_individual_household.withdraw() @@ -527,8 +642,12 @@ def save_images(flex_fields, associated_with): from hct_mis_api.apps.core.utils import serialize_flex_attributes if associated_with not in ("households", "individuals"): - logger.error("associated_with argument must be one of ['household', 'individual']") - raise ValueError("associated_with argument must be one of ['household', 'individual']") + logger.error( + "associated_with argument must be one of ['household', 'individual']" + ) + raise ValueError( + "associated_with argument must be one of ['household', 'individual']" + ) all_flex_fields = serialize_flex_attributes().get(associated_with, {}) @@ -540,8 +659,12 @@ def save_images(flex_fields, associated_with): if flex_field["type"] == TYPE_IMAGE: if isinstance(value, InMemoryUploadedFile): - file_name = "".join(random.choices(string.ascii_uppercase + string.digits, k=3)) - flex_fields[name] = default_storage.save(f"{file_name}-{timezone.now()}.jpg", value) + file_name = "".join( + random.choices(string.ascii_uppercase + string.digits, k=3) + ) + flex_fields[name] = default_storage.save( + f"{file_name}-{timezone.now()}.jpg", value + ) elif isinstance(value, str): file_name = value.replace(default_storage.base_url, "") unquoted_value = urllib.parse.unquote(file_name) @@ -553,7 +676,9 @@ def generate_filename() -> str: return f"{file_name}-{timezone.now()}" -def handle_photo(photo: Union[InMemoryUploadedFile, str], photoraw: str) -> Optional[str]: +def handle_photo( + photo: Union[InMemoryUploadedFile, str], photoraw: str +) -> Optional[str]: if isinstance(photo, InMemoryUploadedFile): return 
default_storage.save(f"{generate_filename()}.jpg", photo) elif isinstance(photo, str): @@ -569,5 +694,5 @@ def handle_document(document) -> dict: return document -def handle_documents(documents) -> List[dict]: +def handle_documents(documents) -> list[dict]: return [handle_document(document) for document in documents] diff --git a/backend/hct_mis_api/apps/grievance/schema.py b/backend/hct_mis_api/apps/grievance/schema.py index b7f10da6a0..02f3c82e14 100644 --- a/backend/hct_mis_api/apps/grievance/schema.py +++ b/backend/hct_mis_api/apps/grievance/schema.py @@ -83,43 +83,77 @@ class GrievanceTicketFilter(FilterSet): SEARCH_TICKET_TYPES_LOOKUPS = { "complaint_ticket_details": { - "individual": ("full_name", "unicef_id", "phone_no", "phone_no_alternative"), + "individual": ( + "full_name", + "unicef_id", + "phone_no", + "phone_no_alternative", + ), "household": ("unicef_id",), }, "sensitive_ticket_details": { - "individual": ("full_name", "unicef_id", "phone_no", "phone_no_alternative"), + "individual": ( + "full_name", + "unicef_id", + "phone_no", + "phone_no_alternative", + ), "household": ("unicef_id",), }, "individual_data_update_ticket_details": { - "individual": ("full_name", "unicef_id", "phone_no", "phone_no_alternative"), + "individual": ( + "full_name", + "unicef_id", + "phone_no", + "phone_no_alternative", + ), }, "add_individual_ticket_details": {"household": ("unicef_id",)}, "system_flagging_ticket_details": { - "golden_records_individual": ("full_name", "unicef_id", "phone_no", "phone_no_alternative") + "golden_records_individual": ( + "full_name", + "unicef_id", + "phone_no", + "phone_no_alternative", + ) }, "needs_adjudication_ticket_details": { - "golden_records_individual": ("full_name", "unicef_id", "phone_no", "phone_no_alternative") + "golden_records_individual": ( + "full_name", + "unicef_id", + "phone_no", + "phone_no_alternative", + ) }, } TICKET_TYPES_WITH_FSP = ( ("complaint_ticket_details", "payment_record__service_provider"), ("sensitive_ticket_details", "payment_record__service_provider"), - ("payment_verification_ticket_details", "payment_verifications__payment_record__service_provider"), + ( + "payment_verification_ticket_details", + "payment_verifications__payment_record__service_provider", + ), ) business_area = CharFilter(field_name="business_area__slug", required=True) search = CharFilter(method="search_filter") - status = TypedMultipleChoiceFilter(field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int) + status = TypedMultipleChoiceFilter( + field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int + ) fsp = CharFilter(method="fsp_filter") admin = ModelMultipleChoiceFilter( - field_name="admin", method="admin_filter", queryset=AdminArea.objects.filter(admin_area_level__admin_level=2) + field_name="admin", + method="admin_filter", + queryset=AdminArea.objects.filter(admin_area_level__admin_level=2), ) cash_plan_payment_verification = CharFilter( field_name="payment_verification_ticket_details", lookup_expr="payment_verifications__cash_plan_payment_verification", ) created_at_range = DateTimeRangeFilter(field_name="created_at") - permissions = MultipleChoiceFilter(choices=Permissions.choices(), method="permissions_filter") + permissions = MultipleChoiceFilter( + choices=Permissions.choices(), method="permissions_filter" + ) class Meta: fields = { @@ -168,7 +202,9 @@ def search_filter(self, qs, name, value): for ticket_type, ticket_fields in self.SEARCH_TICKET_TYPES_LOOKUPS.items(): for field, lookups in ticket_fields.items(): for 
lookup in lookups: - q_obj |= Q(**{f"{ticket_type}__{field}__{lookup}__startswith": value}) + q_obj |= Q( + **{f"{ticket_type}__{field}__{lookup}__startswith": value} + ) return qs.filter(q_obj) @@ -176,7 +212,9 @@ def fsp_filter(self, qs, name, value): if value: q_obj = Q() for ticket_type, path_to_fsp in self.TICKET_TYPES_WITH_FSP: - q_obj |= Q(**{f"{ticket_type}__{path_to_fsp}__full_name__istartswith": value}) + q_obj |= Q( + **{f"{ticket_type}__{path_to_fsp}__full_name__istartswith": value} + ) return qs.filter(q_obj) return qs @@ -187,12 +225,25 @@ def admin_filter(self, qs, name, value): return qs def permissions_filter(self, qs, name, value): - can_view_ex_sensitive_all = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE.value in value - can_view_sensitive_all = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE.value in value - can_view_ex_sensitive_creator = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR.value in value - can_view_ex_sensitive_owner = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER.value in value - can_view_sensitive_creator = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR.value in value - can_view_sensitive_owner = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER.value in value + can_view_ex_sensitive_all = ( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE.value in value + ) + can_view_sensitive_all = ( + Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE.value in value + ) + can_view_ex_sensitive_creator = ( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR.value + in value + ) + can_view_ex_sensitive_owner = ( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER.value in value + ) + can_view_sensitive_creator = ( + Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR.value in value + ) + can_view_sensitive_owner = ( + Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER.value in value + ) # can view all if can_view_ex_sensitive_all and can_view_sensitive_all: @@ -202,7 +253,9 @@ def permissions_filter(self, qs, name, value): filters_1_exclude = {} filters_2 = {} filters_2_exclude = {} - sensitive_category_filter = {"category": GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE} + sensitive_category_filter = { + "category": GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE + } created_by_filter = {"created_by": self.request.user} assigned_to_filter = {"assigned_to": self.request.user} @@ -214,9 +267,13 @@ def permissions_filter(self, qs, name, value): filters_2.update(assigned_to_filter) if can_view_ex_sensitive_all: - return qs.filter(~Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_2)) + return qs.filter( + ~Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_2) + ) else: - return qs.filter(Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_1)) + return qs.filter( + Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_1) + ) else: # no full lists so only creator and/or owner lists @@ -230,7 +287,8 @@ def permissions_filter(self, qs, name, value): filters_2_exclude.update(sensitive_category_filter) if filters_1 or filters_2: return qs.filter( - Q(Q(**filters_1), ~Q(**filters_1_exclude)) | Q(Q(**filters_2), ~Q(**filters_2_exclude)) + Q(Q(**filters_1), ~Q(**filters_1_exclude)) + | Q(Q(**filters_2), ~Q(**filters_2_exclude)) ) else: return GrievanceTicket.objects.none() @@ -238,12 +296,18 @@ def permissions_filter(self, qs, name, value): class ExistingGrievanceTicketFilter(FilterSet): business_area = CharFilter(field_name="business_area__slug", required=True) - category = 
ChoiceFilter(field_name="category", choices=GrievanceTicket.CATEGORY_CHOICES) - issue_type = ChoiceFilter(field_name="issue_type", choices=GrievanceTicket.ALL_ISSUE_TYPES) + category = ChoiceFilter( + field_name="category", choices=GrievanceTicket.CATEGORY_CHOICES + ) + issue_type = ChoiceFilter( + field_name="issue_type", choices=GrievanceTicket.ALL_ISSUE_TYPES + ) household = ModelChoiceFilter(queryset=Household.objects.all()) individual = ModelChoiceFilter(queryset=Individual.objects.all()) payment_record = ModelMultipleChoiceFilter(queryset=PaymentRecord.objects.all()) - permissions = MultipleChoiceFilter(choices=Permissions.choices(), method="permissions_filter") + permissions = MultipleChoiceFilter( + choices=Permissions.choices(), method="permissions_filter" + ) class Meta: fields = ("id",) @@ -281,7 +345,7 @@ def filter_queryset(self, queryset): queryset = self.filters[name].filter(queryset, value) assert isinstance( queryset, models.QuerySet - ), "Expected '%s.%s' to return a QuerySet, but got a %s instead." % ( + ), "Expected '{}.{}' to return a QuerySet, but got a {} instead.".format( type(self).__name__, name, type(queryset).__name__, @@ -316,8 +380,12 @@ class Meta: class GrievanceTicketNode(BaseNodePermissionMixin, DjangoObjectType): permission_classes = ( hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR + ), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER + ), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_OWNER), @@ -353,19 +421,31 @@ def check_node_permission(cls, info, object_instance): if object_instance.category == GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE: perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE.value - creator_perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR.value + creator_perm = ( + Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR.value + ) owner_perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_OWNER.value else: perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE.value - creator_perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR.value - owner_perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER.value + creator_perm = ( + Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR.value + ) + owner_perm = ( + Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER.value + ) - check_creator = object_instance.created_by == user and user.has_permission(creator_perm, business_area) - check_assignee = object_instance.assigned_to == user and user.has_permission(owner_perm, business_area) + check_creator = object_instance.created_by == user and user.has_permission( + creator_perm, business_area + ) + check_assignee = object_instance.assigned_to == user and user.has_permission( + owner_perm, business_area + ) if user.has_permission(perm, business_area) or check_creator or check_assignee: return True - msg = "User is not active creator/assignee and does not have '{perm}' permission" + msg = ( + "User is not active creator/assignee and does not have '{perm}' 
permission" + ) logger.error(msg) raise GraphQLError(msg) @@ -385,7 +465,9 @@ def resolve_individual(grievance_ticket, info): return GrievanceTicketNode._search_for_lookup(grievance_ticket, "individual") def resolve_payment_record(grievance_ticket, info): - return GrievanceTicketNode._search_for_lookup(grievance_ticket, "payment_record") + return GrievanceTicketNode._search_for_lookup( + grievance_ticket, "payment_record" + ) def resolve_admin(grievance_ticket, info): if grievance_ticket.admin2: @@ -430,7 +512,9 @@ def resolve_individual_data(self, info): individual_data = self.individual_data flex_fields = individual_data.get("flex_fields") if flex_fields: - images_flex_fields_names = FlexibleAttribute.objects.filter(type=TYPE_IMAGE).values_list("name", flat=True) + images_flex_fields_names = FlexibleAttribute.objects.filter( + type=TYPE_IMAGE + ).values_list("name", flat=True) for name, value in flex_fields.items(): if value and name in images_flex_fields_names: try: @@ -453,13 +537,17 @@ def resolve_individual_data(self, info): previous_value = document.get("previous_value", {}) if previous_value and previous_value.get("photo"): previous_value["photoraw"] = previous_value["photo"] - previous_value["photo"] = default_storage.url(previous_value.get("photo")) + previous_value["photo"] = default_storage.url( + previous_value.get("photo") + ) documents_to_edit[index]["previous_value"] = previous_value current_value = document.get("value", {}) if current_value and current_value.get("photo"): current_value["photoraw"] = current_value["photo"] - current_value["photo"] = default_storage.url(current_value.get("photo")) + current_value["photo"] = default_storage.url( + current_value.get("photo") + ) documents_to_edit[index]["value"] = current_value individual_data["documents_to_edit"] = documents_to_edit @@ -469,7 +557,9 @@ def resolve_individual_data(self, info): current_value = document.get("value", {}) if current_value and current_value.get("photo"): current_value["photoraw"] = current_value["photo"] - current_value["photo"] = default_storage.url(current_value.get("photo")) + current_value["photo"] = default_storage.url( + current_value.get("photo") + ) documents[index]["value"] = current_value individual_data["documents"] = documents @@ -489,7 +579,9 @@ def resolve_individual_data(self, info): individual_data = self.individual_data flex_fields = individual_data.get("flex_fields") if flex_fields: - images_flex_fields_names = FlexibleAttribute.objects.filter(type=TYPE_IMAGE).values_list("name", flat=True) + images_flex_fields_names = FlexibleAttribute.objects.filter( + type=TYPE_IMAGE + ).values_list("name", flat=True) for name, value in flex_fields.items(): if value and name in images_flex_fields_names: try: @@ -556,7 +648,9 @@ class Meta: def resolve_extra_data(parent, info): golden_records = parent.extra_data.get("golden_records") possible_duplicate = parent.extra_data.get("possible_duplicate") - return TicketNeedsAdjudicationDetailsExtraDataNode(golden_records, possible_duplicate) + return TicketNeedsAdjudicationDetailsExtraDataNode( + golden_records, possible_duplicate + ) class TicketSystemFlaggingDetailsNode(DjangoObjectType): @@ -605,7 +699,10 @@ class IssueTypesObject(graphene.ObjectType): sub_categories = graphene.List(ChoiceObject) def resolve_sub_categories(self, info): - return [{"name": value, "value": key} for key, value in self.get("sub_categories").items()] + return [ + {"name": value, "value": key} + for key, value in self.get("sub_categories").items() + ] class 
AddIndividualFiledObjectType(graphene.ObjectType): @@ -629,8 +726,12 @@ class Query(graphene.ObjectType): filterset_class=GrievanceTicketFilter, permission_classes=( hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR + ), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER + ), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER), @@ -641,8 +742,12 @@ class Query(graphene.ObjectType): filterset_class=ExistingGrievanceTicketFilter, permission_classes=( hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR), - hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR + ), + hopePermissionClass( + Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER + ), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER), @@ -658,8 +763,12 @@ class Query(graphene.ObjectType): year=graphene.Int(required=True), administrative_area=graphene.String(required=False), ) - all_add_individuals_fields_attributes = graphene.List(FieldAttributeNode, description="All field datatype meta.") - all_edit_household_fields_attributes = graphene.List(FieldAttributeNode, description="All field datatype meta.") + all_add_individuals_fields_attributes = graphene.List( + FieldAttributeNode, description="All field datatype meta." + ) + all_edit_household_fields_attributes = graphene.List( + FieldAttributeNode, description="All field datatype meta." 
+ ) grievance_ticket_status_choices = graphene.List(ChoiceObject) grievance_ticket_category_choices = graphene.List(ChoiceObject) grievance_ticket_manual_category_choices = graphene.List(ChoiceObject) @@ -679,7 +788,10 @@ def resolve_grievance_ticket_manual_category_choices(self, info, **kwargs): ] def resolve_grievance_ticket_all_category_choices(self, info, **kwargs): - return [{"name": name, "value": value} for value, name in GrievanceTicket.CATEGORY_CHOICES] + return [ + {"name": name, "value": value} + for value, name in GrievanceTicket.CATEGORY_CHOICES + ] def resolve_grievance_ticket_issue_type_choices(self, info, **kwargs): categories = choices_to_dict(GrievanceTicket.CATEGORY_CHOICES) @@ -721,10 +833,15 @@ def resolve_all_add_individuals_fields_attributes(self, info, **kwargs): [ x for x in CORE_FIELDS_ATTRIBUTES - if x.get("associated_with") == _INDIVIDUAL and x.get("name") in ACCEPTABLE_FIELDS + if x.get("associated_with") == _INDIVIDUAL + and x.get("name") in ACCEPTABLE_FIELDS ] + list(KOBO_ONLY_INDIVIDUAL_FIELDS.values()) - + list(FlexibleAttribute.objects.filter(associated_with=FlexibleAttribute.ASSOCIATED_WITH_INDIVIDUAL)) + + list( + FlexibleAttribute.objects.filter( + associated_with=FlexibleAttribute.ASSOCIATED_WITH_INDIVIDUAL + ) + ) ) return sort_by_attr(all_options, "label.English(EN)") @@ -780,8 +897,13 @@ def resolve_all_edit_household_fields_attributes(self, info, **kwargs): all_options = [ x for x in HOUSEHOLD_EDIT_ONLY_FIELDS + CORE_FIELDS_ATTRIBUTES - if x.get("associated_with") == _HOUSEHOLD and x.get("name") in ACCEPTABLE_FIELDS - ] + list(FlexibleAttribute.objects.filter(associated_with=FlexibleAttribute.ASSOCIATED_WITH_HOUSEHOLD)) + if x.get("associated_with") == _HOUSEHOLD + and x.get("name") in ACCEPTABLE_FIELDS + ] + list( + FlexibleAttribute.objects.filter( + associated_with=FlexibleAttribute.ASSOCIATED_WITH_HOUSEHOLD + ) + ) return sort_by_attr(all_options, "label.English(EN)") @@ -799,7 +921,9 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): try: grievance_tickets = grievance_tickets.filter( - admin=AdminArea.objects.get(id=filters.get("administrative_area")).title + admin=AdminArea.objects.get( + id=filters.get("administrative_area") + ).title ) except AdminArea.DoesNotExist: pass @@ -814,9 +938,16 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): days_30_from_now = datetime.date.today() - datetime.timedelta(days=30) days_60_from_now = datetime.date.today() - datetime.timedelta(days=60) - feedback_categories = [GrievanceTicket.CATEGORY_POSITIVE_FEEDBACK, GrievanceTicket.CATEGORY_NEGATIVE_FEEDBACK] - all_open_tickets = grievance_tickets.filter(~Q(status=GrievanceTicket.STATUS_CLOSED)) - all_closed_tickets = grievance_tickets.filter(status=GrievanceTicket.STATUS_CLOSED) + feedback_categories = [ + GrievanceTicket.CATEGORY_POSITIVE_FEEDBACK, + GrievanceTicket.CATEGORY_NEGATIVE_FEEDBACK, + ] + all_open_tickets = grievance_tickets.filter( + ~Q(status=GrievanceTicket.STATUS_CLOSED) + ) + all_closed_tickets = grievance_tickets.filter( + status=GrievanceTicket.STATUS_CLOSED + ) datasets = [ { @@ -838,8 +969,12 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): return { "labels": grievance_status_labels, "datasets": datasets, - "total_number_of_grievances": grievance_tickets.exclude(category__in=feedback_categories).count(), - "total_number_of_feedback": grievance_tickets.filter(category__in=feedback_categories).count(), + "total_number_of_grievances": 
grievance_tickets.exclude( + category__in=feedback_categories + ).count(), + "total_number_of_feedback": grievance_tickets.filter( + category__in=feedback_categories + ).count(), "total_number_of_open_sensitive": all_open_tickets.filter( category=GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE, ).count(), diff --git a/backend/hct_mis_api/apps/household/admin.py b/backend/hct_mis_api/apps/household/admin.py index 2013d9333e..bac91c0ca9 100644 --- a/backend/hct_mis_api/apps/household/admin.py +++ b/backend/hct_mis_api/apps/household/admin.py @@ -7,9 +7,8 @@ from django.contrib.admin import TabularInline from django.contrib.admin.models import LogEntry from django.contrib.messages import DEFAULT_TAGS -from django.contrib.postgres.fields import JSONField from django.db import transaction -from django.db.models import Count, Q +from django.db.models import Count, JSONField, Q from django.db.transaction import atomic from django.http import HttpResponse, HttpResponseRedirect from django.template.response import TemplateResponse @@ -136,7 +135,12 @@ class HouseholdAdmin( search_fields = ("head_of_household__family_name", "unicef_id") readonly_fields = ("created_at", "updated_at") filter_horizontal = ("representatives", "programs") - raw_id_fields = ("registration_data_import", "admin_area", "head_of_household", "business_area") + raw_id_fields = ( + "registration_data_import", + "admin_area", + "head_of_household", + "business_area", + ) fieldsets = [ (None, {"fields": (("unicef_id", "head_of_household"),)}), ( @@ -182,9 +186,13 @@ def withdrawn(self, request, pk): context["status"] = new_withdrawn_status tickets = GrievanceTicket.objects.belong_household(obj) if obj.withdrawn: - tickets = filter(lambda t: t.ticket.extras.get("status_before_withdrawn", False), tickets) + tickets = filter( + lambda t: t.ticket.extras.get("status_before_withdrawn", False), tickets + ) else: - tickets = filter(lambda t: t.ticket.status != GrievanceTicket.STATUS_CLOSED, tickets) + tickets = filter( + lambda t: t.ticket.status != GrievanceTicket.STATUS_CLOSED, tickets + ) context["tickets"] = tickets if request.method == "POST": @@ -199,20 +207,34 @@ def withdrawn(self, request, pk): message = "{} has been restored" obj.withdrawn = withdrawn withdrawns = list(obj.individuals.values_list("id", flat=True)) - for ind in Individual.objects.filter(id__in=withdrawns, duplicate=False): + for ind in Individual.objects.filter( + id__in=withdrawns, duplicate=False + ): ind.withdrawn = withdrawn ind.save() self.log_change(request, ind, message.format("Individual")) for tkt in context["tickets"]: if withdrawn: - tkt.ticket.extras["status_before_withdrawn"] = tkt.ticket.status + tkt.ticket.extras[ + "status_before_withdrawn" + ] = tkt.ticket.status tkt.ticket.status = GrievanceTicket.STATUS_CLOSED - self.log_change(request, tkt.ticket, "Ticket closed due to Household withdrawn") + self.log_change( + request, + tkt.ticket, + "Ticket closed due to Household withdrawn", + ) else: if tkt.ticket.extras.get("status_before_withdrawn"): - tkt.ticket.status = tkt.ticket.extras["status_before_withdrawn"] + tkt.ticket.status = tkt.ticket.extras[ + "status_before_withdrawn" + ] tkt.ticket.extras["status_before_withdrawn"] = "" - self.log_change(request, tkt.ticket, "Ticket reopened due to Household restore") + self.log_change( + request, + tkt.ticket, + "Ticket reopened due to Household restore", + ) tkt.ticket.save() obj.save() @@ -221,17 +243,23 @@ def withdrawn(self, request, pk): except Exception as e: self.message_user(request, str(e), 
messages.ERROR) - return TemplateResponse(request, "admin/household/household/withdrawn.html", context) + return TemplateResponse( + request, "admin/household/household/withdrawn.html", context + ) @button() def tickets(self, request, pk): context = self.get_common_context(request, pk, title="Tickets") obj = context["original"] tickets = [] - for entry in chain(obj.sensitive_ticket_details.all(), obj.complaint_ticket_details.all()): + for entry in chain( + obj.sensitive_ticket_details.all(), obj.complaint_ticket_details.all() + ): tickets.append(entry.ticket) context["tickets"] = tickets - return TemplateResponse(request, "admin/household/household/tickets.html", context) + return TemplateResponse( + request, "admin/household/household/tickets.html", context + ) @button() def members(self, request, pk): @@ -248,11 +276,15 @@ def sanity_check(self, request, pk): primary = None head = None try: - primary = IndividualRoleInHousehold.objects.get(household=hh, role=ROLE_PRIMARY) + primary = IndividualRoleInHousehold.objects.get( + household=hh, role=ROLE_PRIMARY + ) except IndividualRoleInHousehold.DoesNotExist: warnings.append([messages.ERROR, "Head of househould not found"]) - alternate = IndividualRoleInHousehold.objects.filter(household=hh, role=ROLE_ALTERNATE).first() + alternate = IndividualRoleInHousehold.objects.filter( + household=hh, role=ROLE_ALTERNATE + ).first() try: head = hh.individuals.get(relationship=HEAD) except IndividualRoleInHousehold.DoesNotExist: @@ -264,8 +296,12 @@ def sanity_check(self, request, pk): field = f"{gender}_age_group_{num_range}_count" total_in_ranges += getattr(hh, field, 0) or 0 - active_individuals = hh.individuals.exclude(Q(duplicate=True) | Q(withdrawn=True)) - ghosts_individuals = hh.individuals.filter(Q(duplicate=True) | Q(withdrawn=True)) + active_individuals = hh.individuals.exclude( + Q(duplicate=True) | Q(withdrawn=True) + ) + ghosts_individuals = hh.individuals.filter( + Q(duplicate=True) | Q(withdrawn=True) + ) all_individuals = hh.individuals.all() if hh.collect_individual_data: if active_individuals.count() != hh.size: @@ -273,11 +309,16 @@ def sanity_check(self, request, pk): else: if all_individuals.count() > 1: - warnings.append([messages.ERROR, "Individual data not collected but members found"]) + warnings.append( + [messages.ERROR, "Individual data not collected but members found"] + ) if hh.size != total_in_ranges: warnings.append( - [messages.ERROR, f"HH size ({hh.size}) and ranges population ({total_in_ranges}) does not match"] + [ + messages.ERROR, + f"HH size ({hh.size}) and ranges population ({total_in_ranges}) does not match", + ] ) aaaa = active_individuals.values_list("unicef_id", flat=True) @@ -296,7 +337,9 @@ def sanity_check(self, request, pk): "alternate": alternate, "warnings": [(DEFAULT_TAGS[w[0]], w[1]) for w in warnings], } - return TemplateResponse(request, "admin/household/household/sanity_check.html", context) + return TemplateResponse( + request, "admin/household/household/sanity_check.html", context + ) class IndividualRoleInHouseholdInline(TabularInline): @@ -410,7 +453,9 @@ def formfield_for_dbfield(self, db_field, request, **kwargs): def household_members(self, request, pk): obj = Individual.objects.get(pk=pk) url = reverse("admin:household_individual_changelist") - return HttpResponseRedirect(f"{url}?household|unicef_id|iexact={obj.household.unicef_id}") + return HttpResponseRedirect( + f"{url}?household|unicef_id|iexact={obj.household.unicef_id}" + ) @button() def sanity_check(self, request, pk): @@ -419,7 
+464,9 @@ def sanity_check(self, request, pk): context["roles"] = obj.households_and_roles.all() context["duplicates"] = Individual.objects.filter(unicef_id=obj.unicef_id) - return TemplateResponse(request, "admin/household/individual/sanity_check.html", context) + return TemplateResponse( + request, "admin/household/individual/sanity_check.html", context + ) @admin.register(IndividualRoleInHousehold) @@ -452,7 +499,13 @@ class EntitlementCardAdmin(ExtraUrlMixin, HOPEModelAdminBase): @admin.register(XlsxUpdateFile) class XlsxUpdateFileAdmin(ExtraUrlMixin, HOPEModelAdminBase): - readonly_fields = ("file", "business_area", "rdi", "xlsx_match_columns", "uploaded_by") + readonly_fields = ( + "file", + "business_area", + "rdi", + "xlsx_match_columns", + "uploaded_by", + ) list_filter = ( ("business_area", AutoCompleteFilter), ("uploaded_by", AutoCompleteFilter), @@ -470,21 +523,32 @@ def xlsx_update_stage2(self, request, old_form): updater = IndividualXlsxUpdate(xlsx_update_file) except InvalidColumnsError as e: self.message_user(request, str(e), messages.ERROR) - context = self.get_common_context(request, title="Update Individual by xlsx", form=UpdateByXlsxStage1Form()) - return TemplateResponse(request, "admin/household/individual/xlsx_update.html", context) + context = self.get_common_context( + request, + title="Update Individual by xlsx", + form=UpdateByXlsxStage1Form(), + ) + return TemplateResponse( + request, "admin/household/individual/xlsx_update.html", context + ) context = self.get_common_context( request, title="Update Individual by xlsx", form=UpdateByXlsxStage2Form( - xlsx_columns=updater.columns_names, initial={"xlsx_update_file": xlsx_update_file} + xlsx_columns=updater.columns_names, + initial={"xlsx_update_file": xlsx_update_file}, ), ) - return TemplateResponse(request, "admin/household/individual/xlsx_update_stage2.html", context) + return TemplateResponse( + request, "admin/household/individual/xlsx_update_stage2.html", context + ) def xlsx_update_stage3(self, request, old_form): xlsx_update_file = old_form.cleaned_data["xlsx_update_file"] - xlsx_update_file.xlsx_match_columns = old_form.cleaned_data["xlsx_match_columns"] + xlsx_update_file.xlsx_match_columns = old_form.cleaned_data[ + "xlsx_match_columns" + ] xlsx_update_file.save() updater = IndividualXlsxUpdate(xlsx_update_file) report = updater.get_matching_report() @@ -492,11 +556,15 @@ def xlsx_update_stage3(self, request, old_form): request, title="Update Individual by xlsx Report", unique_report_rows=report[IndividualXlsxUpdate.STATUS_UNIQUE], - multiple_match_report_rows=report[IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH], + multiple_match_report_rows=report[ + IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH + ], no_match_report_rows=report[IndividualXlsxUpdate.STATUS_NO_MATCH], xlsx_update_file=xlsx_update_file.id, ) - return TemplateResponse(request, "admin/household/individual/xlsx_update_stage3.html", context) + return TemplateResponse( + request, "admin/household/individual/xlsx_update_stage3.html", context + ) def add_view(self, request, form_url="", extra_context=None): return self.xlsx_update(request) @@ -504,30 +572,52 @@ def add_view(self, request, form_url="", extra_context=None): def xlsx_update(self, request): if request.method == "GET": form = UpdateByXlsxStage1Form() - form.fields["registration_data_import"].widget = AutocompleteWidget(RegistrationDataImport, self.admin_site) - form.fields["business_area"].widget = AutocompleteWidget(BusinessArea, self.admin_site) - context = 
self.get_common_context(request, title="Update Individual by xlsx", form=form) + form.fields["registration_data_import"].widget = AutocompleteWidget( + RegistrationDataImport, self.admin_site + ) + form.fields["business_area"].widget = AutocompleteWidget( + BusinessArea, self.admin_site + ) + context = self.get_common_context( + request, title="Update Individual by xlsx", form=form + ) elif request.POST.get("stage") == "2": form = UpdateByXlsxStage1Form(request.POST, request.FILES) - context = self.get_common_context(request, title="Update Individual by xlsx", form=form) + context = self.get_common_context( + request, title="Update Individual by xlsx", form=form + ) if form.is_valid(): try: return self.xlsx_update_stage2(request, form) except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) - return TemplateResponse(request, "admin/household/individual/xlsx_update.html", context) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) + return TemplateResponse( + request, "admin/household/individual/xlsx_update.html", context + ) elif request.POST.get("stage") == "3": - xlsx_update_file = XlsxUpdateFile.objects.get(pk=request.POST["xlsx_update_file"]) + xlsx_update_file = XlsxUpdateFile.objects.get( + pk=request.POST["xlsx_update_file"] + ) updater = IndividualXlsxUpdate(xlsx_update_file) - form = UpdateByXlsxStage2Form(request.POST, request.FILES, xlsx_columns=updater.columns_names) - context = self.get_common_context(request, title="Update Individual by xlsx", form=form) + form = UpdateByXlsxStage2Form( + request.POST, request.FILES, xlsx_columns=updater.columns_names + ) + context = self.get_common_context( + request, title="Update Individual by xlsx", form=form + ) if form.is_valid(): try: return self.xlsx_update_stage3(request, form) except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) - return TemplateResponse(request, "admin/household/individual/xlsx_update_stage2.html", context) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) + return TemplateResponse( + request, "admin/household/individual/xlsx_update_stage2.html", context + ) elif request.POST.get("stage") == "4": xlsx_update_file_id = request.POST.get("xlsx_update_file") @@ -537,18 +627,30 @@ def xlsx_update(self, request): with transaction.atomic(): updater.update_individuals() self.message_user(request, "Done", messages.SUCCESS) - return HttpResponseRedirect(reverse("admin:household_individual_changelist")) + return HttpResponseRedirect( + reverse("admin:household_individual_changelist") + ) except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + self.message_user( + request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR + ) report = updater.report_dict context = self.get_common_context( request, title="Update Individual by xlsx Report", unique_report_rows=report[IndividualXlsxUpdate.STATUS_UNIQUE], - multiple_match_report_rows=report[IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH], + multiple_match_report_rows=report[ + IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH + ], no_match_report_rows=report[IndividualXlsxUpdate.STATUS_NO_MATCH], xlsx_update_file=xlsx_update_file.id, ) - return TemplateResponse(request, "admin/household/individual/xlsx_update_stage3.html", context) + return TemplateResponse( + request, + "admin/household/individual/xlsx_update_stage3.html", + context, + ) - return 
TemplateResponse(request, "admin/household/individual/xlsx_update.html", context)
+        return TemplateResponse(
+            request, "admin/household/individual/xlsx_update.html", context
+        )
diff --git a/backend/hct_mis_api/apps/household/const.py b/backend/hct_mis_api/apps/household/const.py
index ff8daa7680..5aa05fec95 100644
--- a/backend/hct_mis_api/apps/household/const.py
+++ b/backend/hct_mis_api/apps/household/const.py
@@ -1,6 +1,6 @@
-from django.utils.translation import ugettext_lazy as _
-from django_countries.data import COUNTRIES
+from django.utils.translation import gettext_lazy as _
+from django_countries.data import COUNTRIES
 
 NATIONALITIES = (
     ("AF", _("Afghan")),
diff --git a/backend/hct_mis_api/apps/household/forms.py b/backend/hct_mis_api/apps/household/forms.py
index 055fc865c5..bd058ebed8 100644
--- a/backend/hct_mis_api/apps/household/forms.py
+++ b/backend/hct_mis_api/apps/household/forms.py
@@ -10,7 +10,9 @@ class UpdateByXlsxStage1Form(forms.Form):
     business_area = forms.ModelChoiceField(queryset=BusinessArea.objects.all())
-    registration_data_import = forms.ModelChoiceField(queryset=RegistrationDataImport.objects.all())
+    registration_data_import = forms.ModelChoiceField(
+        queryset=RegistrationDataImport.objects.all()
+    )
     file = forms.FileField()
 
     def clean_registration_data_import(self) -> Optional[RegistrationDataImport]:
@@ -32,18 +34,22 @@ def _change_rdi_has_correct_business_area(self, registration_data_import) -> Non
 
     def _retrieve_rdi_by_name(self) -> RegistrationDataImport:
         data = self.cleaned_data.get("registration_data_import")
-        registration_data_import = RegistrationDataImport.objects.filter(name=data).first()
+        registration_data_import = RegistrationDataImport.objects.filter(
+            name=data
+        ).first()
         if not registration_data_import:
             raise ValidationError(f"Rdi with the name {data} doesn't exist")
         return registration_data_import
 
 
 class UpdateByXlsxStage2Form(forms.Form):
-    xlsx_update_file = forms.ModelChoiceField(queryset=XlsxUpdateFile.objects.all(), widget=forms.HiddenInput())
+    xlsx_update_file = forms.ModelChoiceField(
+        queryset=XlsxUpdateFile.objects.all(), widget=forms.HiddenInput()
+    )
 
     def __init__(self, *args, **kwargs):
         self.xlsx_columns = kwargs.pop("xlsx_columns", [])
-        super(UpdateByXlsxStage2Form, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields["xlsx_match_columns"] = forms.MultipleChoiceField(
             widget=forms.CheckboxSelectMultiple,
             choices=[(xlsx_column, xlsx_column) for xlsx_column in self.xlsx_columns],
diff --git a/backend/hct_mis_api/apps/household/models.py b/backend/hct_mis_api/apps/household/models.py
index 99998805b9..c8a099183b 100644
--- a/backend/hct_mis_api/apps/household/models.py
+++ b/backend/hct_mis_api/apps/household/models.py
@@ -4,13 +4,13 @@
 from django.conf import settings
 from django.contrib.gis.db.models import Count, PointField, Q, UniqueConstraint
-from django.contrib.postgres.fields import ArrayField, CICharField, JSONField
+from django.contrib.postgres.fields import ArrayField, CICharField
 from django.core.validators import MinLengthValidator, validate_image_file_extension
 from django.db import models
-from django.db.models import F, Sum
+from django.db.models import F, JSONField, Sum
 from django.utils import timezone
 from django.utils.functional import cached_property
-from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
 
 from dateutil.relativedelta import relativedelta
 from django_countries.fields import CountryField
@@ -258,7 +258,9 @@
logger = logging.getLogger(__name__) -class Household(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel): +class Household( + SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel +): ACTIVITY_LOG_MAPPING = create_mapping_dict( [ "withdrawn", @@ -322,7 +324,7 @@ class Household(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncab withdrawn = models.BooleanField(default=False, db_index=True) withdrawn_date = models.DateTimeField(null=True, blank=True, db_index=True) consent_sign = ImageField(validators=[validate_image_file_extension], blank=True) - consent = models.NullBooleanField() + consent = models.BooleanField(null=True) consent_sharing = MultiSelectField(choices=DATA_SHARING_CHOICES, default=BLANK) residence_status = models.CharField(max_length=255, choices=RESIDENCE_STATUS_CHOICE) country_origin = CountryField(blank=True, db_index=True) @@ -330,12 +332,18 @@ class Household(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncab "geo.Country", related_name="+", blank=True, null=True, on_delete=models.PROTECT ) country = CountryField(db_index=True) - country_new = models.ForeignKey("geo.Country", related_name="+", blank=True, null=True, on_delete=models.PROTECT) + country_new = models.ForeignKey( + "geo.Country", related_name="+", blank=True, null=True, on_delete=models.PROTECT + ) size = models.PositiveIntegerField(db_index=True) address = CICharField(max_length=255, blank=True) """location contains lowest administrative area info""" - admin_area = models.ForeignKey("core.AdminArea", null=True, on_delete=models.SET_NULL, blank=True) - admin_area_new = models.ForeignKey("geo.Area", null=True, on_delete=models.SET_NULL, blank=True) + admin_area = models.ForeignKey( + "core.AdminArea", null=True, on_delete=models.SET_NULL, blank=True + ) + admin_area_new = models.ForeignKey( + "geo.Area", null=True, on_delete=models.SET_NULL, blank=True + ) representatives = models.ManyToManyField( to="household.Individual", through="household.IndividualRoleInHousehold", @@ -356,16 +364,36 @@ class Household(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncab male_age_group_12_17_count = models.PositiveIntegerField(default=None, null=True) male_age_group_18_59_count = models.PositiveIntegerField(default=None, null=True) male_age_group_60_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_0_5_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_6_11_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_12_17_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_18_59_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_60_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_0_5_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_6_11_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_12_17_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_18_59_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_60_disabled_count = models.PositiveIntegerField(default=None, null=True) + female_age_group_0_5_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_6_11_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + 
female_age_group_12_17_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_18_59_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_60_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_0_5_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_6_11_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_12_17_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_18_59_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_60_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) registration_data_import = models.ForeignKey( "registration_data.RegistrationDataImport", related_name="households", @@ -376,25 +404,35 @@ class Household(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncab related_name="households", blank=True, ) - returnee = models.NullBooleanField() + returnee = models.BooleanField(null=True) flex_fields = JSONField(default=dict, blank=True) first_registration_date = models.DateTimeField() last_registration_date = models.DateTimeField() - head_of_household = models.OneToOneField("Individual", related_name="heading_household", on_delete=models.CASCADE) - fchild_hoh = models.NullBooleanField() - child_hoh = models.NullBooleanField() + head_of_household = models.OneToOneField( + "Individual", related_name="heading_household", on_delete=models.CASCADE + ) + fchild_hoh = models.BooleanField(null=True) + child_hoh = models.BooleanField(null=True) unicef_id = CICharField(max_length=250, blank=True, default=BLANK, db_index=True) business_area = models.ForeignKey("core.BusinessArea", on_delete=models.CASCADE) start = models.DateTimeField(blank=True, null=True) deviceid = models.CharField(max_length=250, blank=True, default=BLANK) name_enumerator = models.CharField(max_length=250, blank=True, default=BLANK) - org_enumerator = models.CharField(max_length=250, choices=ORG_ENUMERATOR_CHOICES, default=BLANK) + org_enumerator = models.CharField( + max_length=250, choices=ORG_ENUMERATOR_CHOICES, default=BLANK + ) org_name_enumerator = models.CharField(max_length=250, blank=True, default=BLANK) village = models.CharField(max_length=250, blank=True, default=BLANK) - registration_method = models.CharField(max_length=250, choices=REGISTRATION_METHOD_CHOICES, default=BLANK) - collect_individual_data = models.CharField(max_length=250, choices=YES_NO_CHOICE, default=BLANK) + registration_method = models.CharField( + max_length=250, choices=REGISTRATION_METHOD_CHOICES, default=BLANK + ) + collect_individual_data = models.CharField( + max_length=250, choices=YES_NO_CHOICE, default=BLANK + ) currency = models.CharField(max_length=250, choices=CURRENCY_CHOICES, default=BLANK) - unhcr_id = models.CharField(max_length=250, blank=True, default=BLANK, db_index=True) + unhcr_id = models.CharField( + max_length=250, blank=True, default=BLANK, db_index=True + ) user_fields = JSONField(default=dict, blank=True) kobo_asset_id = models.CharField(max_length=150, blank=True, default=BLANK) row_id = models.PositiveIntegerField(blank=True, null=True) @@ -498,7 +536,11 @@ def sanction_list_confirmed_match(self): @property def total_cash_received(self): - return self.payment_records.filter().aggregate(models.Sum("delivered_quantity")).get("delivered_quantity__sum") + return ( + self.payment_records.filter() + 
.aggregate(models.Sum("delivered_quantity")) + .get("delivered_quantity__sum") + ) @property def total_cash_received_usd(self): @@ -533,7 +575,9 @@ def programs_with_delivered_quantity(self): "name": program["program_name"], "quantity": [ { - "total_delivered_quantity": program["total_delivered_quantity_usd"], + "total_delivered_quantity": program[ + "total_delivered_quantity_usd" + ], "currency": "USD", } ], @@ -573,54 +617,104 @@ def recalculate_data(self): male_disability_beneficiary = Q(disabled_disability & male_beneficiary) to_6_years = Q(birth_date__gt=date_6_years_ago) - from_6_to_12_years = Q(birth_date__lte=date_6_years_ago, birth_date__gt=date_12_years_ago) - from_12_to_18_years = Q(birth_date__lte=date_12_years_ago, birth_date__gt=date_18_years_ago) - from_18_to_60_years = Q(birth_date__lte=date_18_years_ago, birth_date__gt=date_60_years_ago) + from_6_to_12_years = Q( + birth_date__lte=date_6_years_ago, birth_date__gt=date_12_years_ago + ) + from_12_to_18_years = Q( + birth_date__lte=date_12_years_ago, birth_date__gt=date_18_years_ago + ) + from_18_to_60_years = Q( + birth_date__lte=date_18_years_ago, birth_date__gt=date_60_years_ago + ) from_60_years = Q(birth_date__lte=date_60_years_ago) age_groups = self.individuals.aggregate( - female_age_group_0_5_count=Count("id", distinct=True, filter=Q(female_beneficiary & to_6_years)), - female_age_group_6_11_count=Count("id", distinct=True, filter=Q(female_beneficiary & from_6_to_12_years)), - female_age_group_12_17_count=Count("id", distinct=True, filter=Q(female_beneficiary & from_12_to_18_years)), - female_age_group_18_59_count=Count("id", distinct=True, filter=Q(female_beneficiary & from_18_to_60_years)), - female_age_group_60_count=Count("id", distinct=True, filter=Q(female_beneficiary & from_60_years)), - male_age_group_0_5_count=Count("id", distinct=True, filter=Q(male_beneficiary & to_6_years)), - male_age_group_6_11_count=Count("id", distinct=True, filter=Q(male_beneficiary & from_6_to_12_years)), - male_age_group_12_17_count=Count("id", distinct=True, filter=Q(male_beneficiary & from_12_to_18_years)), - male_age_group_18_59_count=Count("id", distinct=True, filter=Q(male_beneficiary & from_18_to_60_years)), - male_age_group_60_count=Count("id", distinct=True, filter=Q(male_beneficiary & from_60_years)), + female_age_group_0_5_count=Count( + "id", distinct=True, filter=Q(female_beneficiary & to_6_years) + ), + female_age_group_6_11_count=Count( + "id", distinct=True, filter=Q(female_beneficiary & from_6_to_12_years) + ), + female_age_group_12_17_count=Count( + "id", distinct=True, filter=Q(female_beneficiary & from_12_to_18_years) + ), + female_age_group_18_59_count=Count( + "id", distinct=True, filter=Q(female_beneficiary & from_18_to_60_years) + ), + female_age_group_60_count=Count( + "id", distinct=True, filter=Q(female_beneficiary & from_60_years) + ), + male_age_group_0_5_count=Count( + "id", distinct=True, filter=Q(male_beneficiary & to_6_years) + ), + male_age_group_6_11_count=Count( + "id", distinct=True, filter=Q(male_beneficiary & from_6_to_12_years) + ), + male_age_group_12_17_count=Count( + "id", distinct=True, filter=Q(male_beneficiary & from_12_to_18_years) + ), + male_age_group_18_59_count=Count( + "id", distinct=True, filter=Q(male_beneficiary & from_18_to_60_years) + ), + male_age_group_60_count=Count( + "id", distinct=True, filter=Q(male_beneficiary & from_60_years) + ), female_age_group_0_5_disabled_count=Count( - "id", distinct=True, filter=Q(female_disability_beneficiary & to_6_years) + "id", + 
distinct=True, + filter=Q(female_disability_beneficiary & to_6_years), ), female_age_group_6_11_disabled_count=Count( - "id", distinct=True, filter=Q(female_disability_beneficiary & from_6_to_12_years) + "id", + distinct=True, + filter=Q(female_disability_beneficiary & from_6_to_12_years), ), female_age_group_12_17_disabled_count=Count( - "id", distinct=True, filter=Q(female_disability_beneficiary & from_12_to_18_years) + "id", + distinct=True, + filter=Q(female_disability_beneficiary & from_12_to_18_years), ), female_age_group_18_59_disabled_count=Count( - "id", distinct=True, filter=Q(female_disability_beneficiary & from_18_to_60_years) + "id", + distinct=True, + filter=Q(female_disability_beneficiary & from_18_to_60_years), ), female_age_group_60_disabled_count=Count( - "id", distinct=True, filter=Q(female_disability_beneficiary & from_60_years) + "id", + distinct=True, + filter=Q(female_disability_beneficiary & from_60_years), ), male_age_group_0_5_disabled_count=Count( "id", distinct=True, filter=Q(male_disability_beneficiary & to_6_years) ), male_age_group_6_11_disabled_count=Count( - "id", distinct=True, filter=Q(male_disability_beneficiary & from_6_to_12_years) + "id", + distinct=True, + filter=Q(male_disability_beneficiary & from_6_to_12_years), ), male_age_group_12_17_disabled_count=Count( - "id", distinct=True, filter=Q(male_disability_beneficiary & from_12_to_18_years) + "id", + distinct=True, + filter=Q(male_disability_beneficiary & from_12_to_18_years), ), male_age_group_18_59_disabled_count=Count( - "id", distinct=True, filter=Q(male_disability_beneficiary & from_18_to_60_years) + "id", + distinct=True, + filter=Q(male_disability_beneficiary & from_18_to_60_years), ), male_age_group_60_disabled_count=Count( - "id", distinct=True, filter=Q(male_disability_beneficiary & from_60_years) + "id", + distinct=True, + filter=Q(male_disability_beneficiary & from_60_years), + ), + size=Count( + "id", distinct=True, filter=Q(is_beneficiary & active_beneficiary) + ), + pregnant_count=Count( + "id", + distinct=True, + filter=Q(is_beneficiary & active_beneficiary & Q(pregnant=True)), ), - size=Count("id", distinct=True, filter=Q(is_beneficiary & active_beneficiary)), - pregnant_count=Count("id", distinct=True, filter=Q(is_beneficiary & active_beneficiary & Q(pregnant=True))), ) updated_fields = ["child_hoh", "fchild_hoh"] for key, value in age_groups.items(): @@ -637,13 +731,17 @@ def recalculate_data(self): class DocumentValidator(TimeStampedUUIDModel): - type = models.ForeignKey("DocumentType", related_name="validators", on_delete=models.CASCADE) + type = models.ForeignKey( + "DocumentType", related_name="validators", on_delete=models.CASCADE + ) regex = models.CharField(max_length=100, default=".*") class DocumentType(TimeStampedUUIDModel): country = CountryField(default="U") - country_new = models.ForeignKey("geo.Country", blank=True, null=True, on_delete=models.PROTECT) + country_new = models.ForeignKey( + "geo.Country", blank=True, null=True, on_delete=models.PROTECT + ) label = models.CharField(max_length=100) type = models.CharField(max_length=50, choices=IDENTIFICATION_TYPE_CHOICE) @@ -657,8 +755,12 @@ def __str__(self): class Document(SoftDeletableModel, TimeStampedUUIDModel): document_number = models.CharField(max_length=255, blank=True) photo = models.ImageField(blank=True) - individual = models.ForeignKey("Individual", related_name="documents", on_delete=models.CASCADE) - type = models.ForeignKey("DocumentType", related_name="documents", on_delete=models.CASCADE) + individual 
= models.ForeignKey( + "Individual", related_name="documents", on_delete=models.CASCADE + ) + type = models.ForeignKey( + "DocumentType", related_name="documents", on_delete=models.CASCADE + ) STATUS_PENDING = "PENDING" STATUS_VALID = "VALID" STATUS_NEED_INVESTIGATION = "NEED_INVESTIGATION" @@ -669,7 +771,9 @@ class Document(SoftDeletableModel, TimeStampedUUIDModel): (STATUS_NEED_INVESTIGATION, _("Need Investigation")), (STATUS_INVALID, _("Invalid")), ) - status = models.CharField(max_length=20, choices=STATUS_CHOICES, default=STATUS_PENDING) + status = models.CharField( + max_length=20, choices=STATUS_CHOICES, default=STATUS_PENDING + ) def clean(self): from django.core.exceptions import ValidationError @@ -695,7 +799,9 @@ class Agency(models.Model): max_length=100, ) country = CountryField() - country_new = models.ForeignKey("geo.Country", blank=True, null=True, on_delete=models.PROTECT) + country_new = models.ForeignKey( + "geo.Country", blank=True, null=True, on_delete=models.PROTECT + ) class Meta: verbose_name_plural = "Agencies" @@ -711,8 +817,12 @@ def __str__(self): class IndividualIdentity(models.Model): - agency = models.ForeignKey("Agency", related_name="individual_identities", on_delete=models.CASCADE) - individual = models.ForeignKey("Individual", related_name="identities", on_delete=models.CASCADE) + agency = models.ForeignKey( + "Agency", related_name="individual_identities", on_delete=models.CASCADE + ) + individual = models.ForeignKey( + "Individual", related_name="identities", on_delete=models.CASCADE + ) number = models.CharField( max_length=255, ) @@ -745,7 +855,9 @@ def __str__(self): return f"{self.individual.full_name} - {self.role}" -class Individual(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel): +class Individual( + SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel +): ACTIVITY_LOG_MAPPING = create_mapping_dict( [ "status", @@ -802,14 +914,18 @@ class Individual(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSynca withdrawn_date = models.DateTimeField(null=True, blank=True) individual_id = models.CharField(max_length=255, blank=True) photo = models.ImageField(blank=True) - full_name = CICharField(max_length=255, validators=[MinLengthValidator(2)], db_index=True) + full_name = CICharField( + max_length=255, validators=[MinLengthValidator(2)], db_index=True + ) given_name = CICharField(max_length=85, blank=True, db_index=True) middle_name = CICharField(max_length=85, blank=True, db_index=True) family_name = CICharField(max_length=85, blank=True, db_index=True) sex = models.CharField(max_length=255, choices=SEX_CHOICE, db_index=True) birth_date = models.DateField(db_index=True) estimated_birth_date = models.BooleanField(default=False) - marital_status = models.CharField(max_length=255, choices=MARITAL_STATUS_CHOICE, default=BLANK, db_index=True) + marital_status = models.CharField( + max_length=255, choices=MARITAL_STATUS_CHOICE, default=BLANK, db_index=True + ) phone_no = PhoneNumberField(blank=True) phone_no_alternative = PhoneNumberField(blank=True) relationship = models.CharField( @@ -836,7 +952,9 @@ class Individual(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSynca on_delete=models.CASCADE, null=True, ) - disability = models.CharField(max_length=20, choices=DISABILITY_CHOICES, default=NOT_DISABLED) + disability = models.CharField( + max_length=20, choices=DISABILITY_CHOICES, default=NOT_DISABLED + ) work_status = models.CharField( max_length=20, 
choices=WORK_STATUS_CHOICE, @@ -847,8 +965,8 @@ class Individual(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSynca last_registration_date = models.DateField() flex_fields = JSONField(default=dict, blank=True) user_fields = JSONField(default=dict, blank=True) - enrolled_in_nutrition_programme = models.NullBooleanField() - administration_of_rutf = models.NullBooleanField() + enrolled_in_nutrition_programme = models.BooleanField(null=True) + administration_of_rutf = models.BooleanField(null=True) unicef_id = CICharField(max_length=250, blank=True, db_index=True) deduplication_golden_record_status = models.CharField( max_length=50, @@ -866,14 +984,28 @@ class Individual(SoftDeletableModelWithDate, TimeStampedUUIDModel, AbstractSynca sanction_list_possible_match = models.BooleanField(default=False) sanction_list_confirmed_match = models.BooleanField(default=False) sanction_list_last_check = models.DateTimeField(null=True, blank=True) - pregnant = models.NullBooleanField() - observed_disability = MultiSelectField(choices=OBSERVED_DISABILITY_CHOICE, default=NONE) - seeing_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - hearing_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - physical_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - memory_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - selfcare_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - comms_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) + pregnant = models.BooleanField(null=True) + observed_disability = MultiSelectField( + choices=OBSERVED_DISABILITY_CHOICE, default=NONE + ) + seeing_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + hearing_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + physical_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + memory_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + selfcare_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + comms_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) who_answers_phone = models.CharField(max_length=150, blank=True) who_answers_alt_phone = models.CharField(max_length=150, blank=True) business_area = models.ForeignKey("core.BusinessArea", on_delete=models.CASCADE) @@ -970,7 +1102,9 @@ def recalculate_data(self): should_be_disabled = False for field in disability_fields: value = getattr(self, field, None) - should_be_disabled = should_be_disabled or value == CANNOT_DO or value == LOT_DIFFICULTY + should_be_disabled = ( + should_be_disabled or value == CANNOT_DO or value == LOT_DIFFICULTY + ) self.disability = DISABLED if should_be_disabled else NOT_DISABLED self.save(update_fields=["disability"]) @@ -983,20 +1117,28 @@ def count_primary_roles(self): @cached_property def parents(self): if self.household: - return self.household.individuals.exclude(Q(duplicate=True) | Q(withdrawn=True)) + return self.household.individuals.exclude( + Q(duplicate=True) | Q(withdrawn=True) + ) return [] def is_golden_record_duplicated(self): return 
self.deduplication_golden_record_status == DUPLICATE
 
     def get_deduplication_golden_record(self):
-        status_key = "duplicates" if self.is_golden_record_duplicated() else "possible_duplicates"
+        status_key = (
+            "duplicates"
+            if self.is_golden_record_duplicated()
+            else "possible_duplicates"
+        )
         return self.deduplication_golden_record_results.get(status_key, [])
 
     @cached_property
     def active_record(self):
         if self.duplicate:
-            return Individual.objects.filter(unicef_id=self.unicef_id, duplicate=False, is_removed=False).first()
+            return Individual.objects.filter(
+                unicef_id=self.unicef_id, duplicate=False, is_removed=False
+            ).first()
 
     def is_head(self):
         if not self.household:
@@ -1034,6 +1176,10 @@ class EntitlementCard(TimeStampedUUIDModel):
 class XlsxUpdateFile(TimeStampedUUIDModel):
     file = models.FileField()
     business_area = models.ForeignKey("core.BusinessArea", on_delete=models.CASCADE)
-    rdi = models.ForeignKey("registration_data.RegistrationDataImport", on_delete=models.CASCADE, null=True)
+    rdi = models.ForeignKey(
+        "registration_data.RegistrationDataImport", on_delete=models.CASCADE, null=True
+    )
     xlsx_match_columns = ArrayField(models.CharField(max_length=32), null=True)
-    uploaded_by = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.PROTECT)
+    uploaded_by = models.ForeignKey(
+        settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.PROTECT
+    )
diff --git a/backend/hct_mis_api/apps/mis_datahub/models.py b/backend/hct_mis_api/apps/mis_datahub/models.py
index cafba9ba46..2963809255 100644
--- a/backend/hct_mis_api/apps/mis_datahub/models.py
+++ b/backend/hct_mis_api/apps/mis_datahub/models.py
@@ -1,5 +1,5 @@
 from django.db import models
-from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
 
 from hct_mis_api.apps.household.models import (
     IDENTIFICATION_TYPE_CHOICE,
@@ -28,7 +28,9 @@ class Household(SessionModel):
     mis_id = models.UUIDField()
     unhcr_id = models.CharField(max_length=255, null=True)
     unicef_id = models.CharField(max_length=255, null=True)
-    status = models.CharField(max_length=20, choices=INDIVIDUAL_HOUSEHOLD_STATUS, default="ACTIVE")
+    status = models.CharField(
+        max_length=20, choices=INDIVIDUAL_HOUSEHOLD_STATUS, default="ACTIVE"
+    )
     household_size = models.PositiveIntegerField()
     # registration household id
     form_number = models.CharField(max_length=255, null=True)
@@ -36,7 +38,9 @@ class Household(SessionModel):
     admin1 = models.CharField(max_length=255, null=True)
     admin2 = models.CharField(max_length=255, null=True)
     country = models.CharField(null=True, max_length=3)
-    residence_status = models.CharField(max_length=255, choices=RESIDENCE_STATUS_CHOICE, null=True)
+    residence_status = models.CharField(
+        max_length=255, choices=RESIDENCE_STATUS_CHOICE, null=True
+    )
     registration_date = models.DateField(null=True)
     village = models.CharField(max_length=250, blank=True, null=True)
 
@@ -81,7 +85,7 @@ class Individual(SessionModel):
         choices=MARITAL_STATUS_CHOICE,
     )
     phone_number = models.CharField(max_length=60, null=True)
-    pregnant = models.NullBooleanField()
+    pregnant = models.BooleanField(null=True)
     sanction_list_confirmed_match = models.BooleanField(default=False)
 
     class Meta:
@@ -190,7 +194,9 @@ class Document(SessionModel):
         (DAMAGED, _("Damaged")),
     )
 
-    status = models.CharField(choices=STATUS_CHOICE, null=True, max_length=30, default=None)
+    status = models.CharField(
+        choices=STATUS_CHOICE, null=True, max_length=30, default=None
+    )
     date_of_expiry =
models.DateField(null=True, default=None) photo = models.ImageField(blank=True, default="") mis_id = models.UUIDField() diff --git a/backend/hct_mis_api/apps/payment/models.py b/backend/hct_mis_api/apps/payment/models.py index e50adaf557..8fa05702d8 100644 --- a/backend/hct_mis_api/apps/payment/models.py +++ b/backend/hct_mis_api/apps/payment/models.py @@ -1,12 +1,12 @@ from decimal import Decimal -from django.contrib.postgres.fields import JSONField from django.core.validators import MinValueValidator from django.db import models +from django.db.models import JSONField from django.db.models.signals import post_save from django.dispatch import receiver from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from model_utils import Choices @@ -98,7 +98,10 @@ class PaymentRecord(TimeStampedUUIDModel, ConcurrencyModel): related_name="payment_records", ) head_of_household = models.ForeignKey( - "household.Individual", on_delete=models.CASCADE, related_name="payment_records", null=True + "household.Individual", + on_delete=models.CASCADE, + related_name="payment_records", + null=True, ) full_name = models.CharField(max_length=255) @@ -114,7 +117,10 @@ class PaymentRecord(TimeStampedUUIDModel, ConcurrencyModel): target_population_cash_assist_id = models.CharField(max_length=255) entitlement_card_number = models.CharField(max_length=255, null=True) entitlement_card_status = models.CharField( - choices=ENTITLEMENT_CARD_STATUS_CHOICE, default="ACTIVE", max_length=20, null=True + choices=ENTITLEMENT_CARD_STATUS_CHOICE, + default="ACTIVE", + max_length=20, + null=True, ) entitlement_card_issue_date = models.DateField(null=True) delivery_type = models.CharField( @@ -135,7 +141,10 @@ class PaymentRecord(TimeStampedUUIDModel, ConcurrencyModel): validators=[MinValueValidator(Decimal("0.01"))], ) delivered_quantity_usd = models.DecimalField( - decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal("0.01"))], null=True + decimal_places=2, + max_digits=12, + validators=[MinValueValidator(Decimal("0.01"))], + null=True, ) delivery_date = models.DateTimeField(null=True, blank=True) service_provider = models.ForeignKey( @@ -205,14 +214,18 @@ class CashPlanPaymentVerification(TimeStampedUUIDModel, ConcurrencyModel): (VERIFICATION_METHOD_XLSX, "XLSX"), (VERIFICATION_METHOD_MANUAL, "MANUAL"), ) - status = models.CharField(max_length=50, choices=STATUS_CHOICES, default=STATUS_PENDING, db_index=True) + status = models.CharField( + max_length=50, choices=STATUS_CHOICES, default=STATUS_PENDING, db_index=True + ) cash_plan = models.ForeignKey( "program.CashPlan", on_delete=models.CASCADE, related_name="verifications", ) sampling = models.CharField(max_length=50, choices=SAMPLING_CHOICES) - verification_method = models.CharField(max_length=50, choices=VERIFICATION_METHOD_CHOICES) + verification_method = models.CharField( + max_length=50, choices=VERIFICATION_METHOD_CHOICES + ) sample_size = models.PositiveIntegerField(null=True) responded_count = models.PositiveIntegerField(null=True) received_count = models.PositiveIntegerField(null=True) @@ -268,8 +281,12 @@ class PaymentVerification(TimeStampedUUIDModel, ConcurrencyModel): on_delete=models.CASCADE, related_name="payment_record_verifications", ) - payment_record = models.ForeignKey("PaymentRecord", on_delete=models.CASCADE, related_name="verifications") - status = models.CharField(max_length=50, choices=STATUS_CHOICES, default=STATUS_PENDING) + payment_record = 
models.ForeignKey( + "PaymentRecord", on_delete=models.CASCADE, related_name="verifications" + ) + status = models.CharField( + max_length=50, choices=STATUS_CHOICES, default=STATUS_PENDING + ) status_date = models.DateTimeField(null=True) received_amount = models.DecimalField( decimal_places=2, @@ -286,7 +303,9 @@ def is_manually_editable(self): ): return False minutes_elapsed = (timezone.now() - self.status_date).total_seconds() / 60 - return not (self.status != PaymentVerification.STATUS_PENDING and minutes_elapsed > 10) + return not ( + self.status != PaymentVerification.STATUS_PENDING and minutes_elapsed > 10 + ) @property def business_area(self): diff --git a/backend/hct_mis_api/apps/payment/utils.py b/backend/hct_mis_api/apps/payment/utils.py index 2b2b7dd528..427f2cc62d 100644 --- a/backend/hct_mis_api/apps/payment/utils.py +++ b/backend/hct_mis_api/apps/payment/utils.py @@ -1,20 +1,29 @@ from decimal import Decimal +from math import ceil from django.db.models import Q -from math import ceil from hct_mis_api.apps.core.utils import chart_create_filter_query, chart_get_filtered_qs -from hct_mis_api.apps.payment.models import PaymentVerification, PaymentRecord +from hct_mis_api.apps.payment.models import PaymentRecord, PaymentVerification -def get_number_of_samples(payment_records_sample_count, confidence_interval, margin_of_error): +def get_number_of_samples( + payment_records_sample_count, confidence_interval, margin_of_error +): from statistics import NormalDist variable = 0.5 z_score = NormalDist().inv_cdf(confidence_interval + (1 - confidence_interval) / 2) - theoretical_sample = (z_score ** 2) * variable * (1 - variable) / margin_of_error ** 2 + theoretical_sample = ( + (z_score ** 2) * variable * (1 - variable) / margin_of_error ** 2 + ) actual_sample = ceil( - (payment_records_sample_count * theoretical_sample / (theoretical_sample + payment_records_sample_count)) * 1.5 + ( + payment_records_sample_count + * theoretical_sample + / (theoretical_sample + payment_records_sample_count) + ) + * 1.5 ) return min(actual_sample, payment_records_sample_count) @@ -36,7 +45,7 @@ def from_received_to_status(received, received_amount, delivered_amount): def float_to_decimal(received_amount): if isinstance(received_amount, float): - return Decimal("{:.2f}".format(round(received_amount, 2))) + return Decimal(f"{round(received_amount, 2):.2f}") return received_amount @@ -50,21 +59,31 @@ def from_received_yes_no_to_status(received, received_amount, delivered_amount): def calculate_counts(cash_plan_verification): - cash_plan_verification.responded_count = cash_plan_verification.payment_record_verifications.filter( - ~Q(status=PaymentVerification.STATUS_PENDING) - ).count() - cash_plan_verification.received_count = cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_RECEIVED) - ).count() - cash_plan_verification.not_received_count = cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_NOT_RECEIVED) - ).count() - cash_plan_verification.received_with_problems_count = cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_RECEIVED_WITH_ISSUES) - ).count() + cash_plan_verification.responded_count = ( + cash_plan_verification.payment_record_verifications.filter( + ~Q(status=PaymentVerification.STATUS_PENDING) + ).count() + ) + cash_plan_verification.received_count = ( + cash_plan_verification.payment_record_verifications.filter( + 
Q(status=PaymentVerification.STATUS_RECEIVED) + ).count() + ) + cash_plan_verification.not_received_count = ( + cash_plan_verification.payment_record_verifications.filter( + Q(status=PaymentVerification.STATUS_NOT_RECEIVED) + ).count() + ) + cash_plan_verification.received_with_problems_count = ( + cash_plan_verification.payment_record_verifications.filter( + Q(status=PaymentVerification.STATUS_RECEIVED_WITH_ISSUES) + ).count() + ) -def get_payment_records_for_dashboard(year, business_area_slug, filters, only_with_delivered_quantity=False): +def get_payment_records_for_dashboard( + year, business_area_slug, filters, only_with_delivered_quantity=False +): additional_filters = {} if only_with_delivered_quantity: additional_filters["delivered_quantity_usd__gt"] = 0 diff --git a/backend/hct_mis_api/apps/power_query/models.py b/backend/hct_mis_api/apps/power_query/models.py index 5079bcd789..583fb10aee 100644 --- a/backend/hct_mis_api/apps/power_query/models.py +++ b/backend/hct_mis_api/apps/power_query/models.py @@ -3,9 +3,9 @@ from django.conf import settings from django.contrib.contenttypes.models import ContentType -from django.contrib.postgres.fields import JSONField from django.core.exceptions import ObjectDoesNotExist from django.db import models +from django.db.models import JSONField from django.template import Context, Template from django.utils import timezone @@ -31,7 +31,9 @@ class Query(models.Model): name = models.CharField(max_length=255, blank=True, null=True, unique=True) description = models.TextField(blank=True, null=True) - owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name="power_queries") + owner = models.ForeignKey( + User, on_delete=models.CASCADE, related_name="power_queries" + ) target = models.ForeignKey(ContentType, on_delete=models.CASCADE, default="") code = models.TextField(default="qs=conn.all()", blank=True) info = JSONField(default=dict, blank=True) @@ -46,7 +48,9 @@ class Meta: verbose_name_plural = "Power Queries" ordering = ("name",) - def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + def save( + self, force_insert=False, force_update=False, using=None, update_fields=None + ): if not self.code: self.code = "qs=conn.all().order_by('id')" self.error = None @@ -76,7 +80,9 @@ def execute(self, persist=False, query_args=None): _error = None try: locals_ = dict() - locals_["conn"] = model._default_manager.using(settings.POWER_QUERY_DB_ALIAS) + locals_["conn"] = model._default_manager.using( + settings.POWER_QUERY_DB_ALIAS + ) locals_["query"] = self locals_["query_filters"] = filters locals_["invoke"] = self._invoke @@ -90,7 +96,12 @@ def execute(self, persist=False, query_args=None): "debug_info": debug_info, } r, __ = Dataset.objects.update_or_create( - query=self, defaults={"last_run": timezone.now(), "result": pickle.dumps(result), "info": info} + query=self, + defaults={ + "last_run": timezone.now(), + "result": pickle.dumps(result), + "info": info, + }, ) return result, debug_info @@ -118,7 +129,9 @@ def data(self): class Formatter(models.Model): name = models.CharField(max_length=255, blank=True, null=True, unique=True) - content_type = models.CharField(max_length=5, choices=list(map(list, mimetype_map.items()))) + content_type = models.CharField( + max_length=5, choices=list(map(list, mimetype_map.items())) + ) code = models.TextField(blank=True, null=True) def __str__(self): @@ -146,7 +159,9 @@ class Report(models.Model): query = models.ForeignKey(Query, on_delete=models.CASCADE) formatter = 
models.ForeignKey(Formatter, on_delete=models.CASCADE) refresh = models.BooleanField(default=False) - owner = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE, related_name="+") + owner = models.ForeignKey( + User, blank=True, null=True, on_delete=models.CASCADE, related_name="+" + ) available_to = models.ManyToManyField(User, blank=True, related_name="+") query_args = JSONField(default=dict, blank=True) diff --git a/backend/hct_mis_api/apps/program/models.py b/backend/hct_mis_api/apps/program/models.py index 01012a1d15..fc528dda68 100644 --- a/backend/hct_mis_api/apps/program/models.py +++ b/backend/hct_mis_api/apps/program/models.py @@ -12,7 +12,7 @@ from django.db.models import Count, Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from model_utils.models import SoftDeletableModel @@ -29,7 +29,9 @@ ) -class Program(SoftDeletableModel, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel): +class Program( + SoftDeletableModel, TimeStampedUUIDModel, AbstractSyncable, ConcurrencyModel +): ACTIVITY_LOG_MAPPING = create_mapping_dict( [ "name", @@ -120,10 +122,15 @@ class Program(SoftDeletableModel, TimeStampedUUIDModel, AbstractSyncable, Concur related_name="programs", blank=True, ) - admin_areas_new = models.ManyToManyField("geo.Area", related_name="programs", blank=True) + admin_areas_new = models.ManyToManyField( + "geo.Area", related_name="programs", blank=True + ) business_area = models.ForeignKey("core.BusinessArea", on_delete=models.CASCADE) budget = models.DecimalField( - decimal_places=2, max_digits=11, validators=[MinValueValidator(Decimal("0.00"))], db_index=True + decimal_places=2, + max_digits=11, + validators=[MinValueValidator(Decimal("0.00"))], + db_index=True, ) frequency_of_payments = models.CharField( max_length=50, @@ -201,33 +208,59 @@ class CashPlan(TimeStampedUUIDModel): coverage_duration = models.PositiveIntegerField() coverage_unit = models.CharField(max_length=255) comments = models.CharField(max_length=255, null=True) - program = models.ForeignKey("program.Program", on_delete=models.CASCADE, related_name="cash_plans") + program = models.ForeignKey( + "program.Program", on_delete=models.CASCADE, related_name="cash_plans" + ) delivery_type = models.CharField( - choices=PaymentRecord.DELIVERY_TYPE_CHOICE, max_length=24, null=True, db_index=True + choices=PaymentRecord.DELIVERY_TYPE_CHOICE, + max_length=24, + null=True, + db_index=True, ) assistance_measurement = models.CharField(max_length=255, db_index=True) assistance_through = models.CharField(max_length=255, db_index=True) service_provider = models.ForeignKey( - "payment.ServiceProvider", null=True, related_name="cash_plans", on_delete=models.CASCADE + "payment.ServiceProvider", + null=True, + related_name="cash_plans", + on_delete=models.CASCADE, ) vision_id = models.CharField(max_length=255, null=True) funds_commitment = models.CharField(max_length=255, null=True) - exchange_rate = models.DecimalField(decimal_places=8, blank=True, null=True, max_digits=12) + exchange_rate = models.DecimalField( + decimal_places=8, blank=True, null=True, max_digits=12 + ) down_payment = models.CharField(max_length=255, null=True) validation_alerts_count = models.IntegerField() total_persons_covered = models.IntegerField(db_index=True) total_persons_covered_revised = models.IntegerField(db_index=True) total_entitled_quantity = 
models.DecimalField( - decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal("0.01"))], db_index=True, null=True + decimal_places=2, + max_digits=12, + validators=[MinValueValidator(Decimal("0.01"))], + db_index=True, + null=True, ) total_entitled_quantity_revised = models.DecimalField( - decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal("0.01"))], db_index=True, null=True + decimal_places=2, + max_digits=12, + validators=[MinValueValidator(Decimal("0.01"))], + db_index=True, + null=True, ) total_delivered_quantity = models.DecimalField( - decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal("0.01"))], db_index=True, null=True + decimal_places=2, + max_digits=12, + validators=[MinValueValidator(Decimal("0.01"))], + db_index=True, + null=True, ) total_undelivered_quantity = models.DecimalField( - decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal("0.01"))], db_index=True, null=True + decimal_places=2, + max_digits=12, + validators=[MinValueValidator(Decimal("0.01"))], + db_index=True, + null=True, ) verification_status = models.CharField( max_length=10, @@ -245,7 +278,9 @@ def payment_records_count(self): @property def bank_reconciliation_success(self): - return self.payment_records.filter(status__in=PaymentRecord.ALLOW_CREATE_VERIFICATION).count() + return self.payment_records.filter( + status__in=PaymentRecord.ALLOW_CREATE_VERIFICATION + ).count() @property def bank_reconciliation_error(self): diff --git a/backend/hct_mis_api/apps/registration_data/models.py b/backend/hct_mis_api/apps/registration_data/models.py index e0b03ab094..375635fe4f 100644 --- a/backend/hct_mis_api/apps/registration_data/models.py +++ b/backend/hct_mis_api/apps/registration_data/models.py @@ -7,7 +7,7 @@ ) from django.db import models from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from hct_mis_api.apps.activity_log.utils import create_mapping_dict from hct_mis_api.apps.core.models import BusinessArea @@ -71,7 +71,9 @@ class RegistrationDataImport(TimeStampedUUIDModel, ConcurrencyModel): ProhibitNullCharactersValidator(), ], ) - status = models.CharField(max_length=255, choices=STATUS_CHOICE, default=IN_REVIEW, db_index=True) + status = models.CharField( + max_length=255, choices=STATUS_CHOICE, default=IN_REVIEW, db_index=True + ) import_date = models.DateTimeField(auto_now_add=True, db_index=True) imported_by = models.ForeignKey( settings.AUTH_USER_MODEL, @@ -97,7 +99,9 @@ def __str__(self): @cached_property def all_imported_individuals(self): - return ImportedIndividual.objects.filter(registration_data_import=self.datahub_id) + return ImportedIndividual.objects.filter( + registration_data_import=self.datahub_id + ) class Meta: unique_together = ("name", "business_area") diff --git a/backend/hct_mis_api/apps/registration_datahub/models.py b/backend/hct_mis_api/apps/registration_datahub/models.py index 6b3f49a246..6dff4c93fc 100644 --- a/backend/hct_mis_api/apps/registration_datahub/models.py +++ b/backend/hct_mis_api/apps/registration_datahub/models.py @@ -3,14 +3,14 @@ from datetime import date from django.contrib.gis.db.models import PointField -from django.contrib.postgres.fields import JSONField from django.core.validators import ( MaxLengthValidator, MinLengthValidator, validate_image_file_extension, ) from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.db.models import 
JSONField +from django.utils.translation import gettext_lazy as _ from django_countries.fields import CountryField from multiselectfield import MultiSelectField @@ -57,7 +57,7 @@ class ImportedHousehold(TimeStampedUUIDModel): consent_sign = ImageField(validators=[validate_image_file_extension], blank=True) - consent = models.NullBooleanField() + consent = models.BooleanField(null=True) consent_sharing = MultiSelectField(choices=DATA_SHARING_CHOICES, default=BLANK) residence_status = models.CharField(max_length=255, choices=RESIDENCE_STATUS_CHOICE) country_origin = CountryField() @@ -80,19 +80,41 @@ class ImportedHousehold(TimeStampedUUIDModel): male_age_group_12_17_count = models.PositiveIntegerField(default=None, null=True) male_age_group_18_59_count = models.PositiveIntegerField(default=None, null=True) male_age_group_60_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_0_5_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_6_11_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_12_17_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_18_59_disabled_count = models.PositiveIntegerField(default=None, null=True) - female_age_group_60_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_0_5_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_6_11_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_12_17_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_18_59_disabled_count = models.PositiveIntegerField(default=None, null=True) - male_age_group_60_disabled_count = models.PositiveIntegerField(default=None, null=True) - head_of_household = models.OneToOneField("ImportedIndividual", on_delete=models.CASCADE, null=True) - fchild_hoh = models.NullBooleanField() - child_hoh = models.NullBooleanField() + female_age_group_0_5_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_6_11_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_12_17_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_18_59_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + female_age_group_60_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_0_5_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_6_11_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_12_17_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_18_59_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + male_age_group_60_disabled_count = models.PositiveIntegerField( + default=None, null=True + ) + head_of_household = models.OneToOneField( + "ImportedIndividual", on_delete=models.CASCADE, null=True + ) + fchild_hoh = models.BooleanField(null=True) + child_hoh = models.BooleanField(null=True) registration_data_import = models.ForeignKey( "RegistrationDataImportDatahub", related_name="households", @@ -100,16 +122,22 @@ class ImportedHousehold(TimeStampedUUIDModel): ) first_registration_date = models.DateTimeField() last_registration_date = models.DateTimeField() - returnee = models.NullBooleanField() + returnee = models.BooleanField(null=True) 
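    # A minimal before/after sketch of the boolean field swap (assuming the
    # Django 3.1+ deprecation of NullBooleanField; the field name is taken from
    # this model, the two-line comparison is illustrative only):
    #
    #     consent = models.NullBooleanField()        # Django <= 3.0
    #     consent = models.BooleanField(null=True)   # Django 3.1+ equivalent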
flex_fields = JSONField(default=dict) start = models.DateTimeField(blank=True, null=True) deviceid = models.CharField(max_length=250, blank=True) name_enumerator = models.CharField(max_length=250, blank=True, default=BLANK) - org_enumerator = models.CharField(max_length=250, choices=ORG_ENUMERATOR_CHOICES, blank=True, default=BLANK) + org_enumerator = models.CharField( + max_length=250, choices=ORG_ENUMERATOR_CHOICES, blank=True, default=BLANK + ) org_name_enumerator = models.CharField(max_length=250, blank=True, default=BLANK) village = models.CharField(max_length=250, blank=True, default=BLANK) - registration_method = models.CharField(max_length=250, choices=REGISTRATION_METHOD_CHOICES, default=BLANK) - collect_individual_data = models.CharField(max_length=250, choices=YES_NO_CHOICE, default=BLANK) + registration_method = models.CharField( + max_length=250, choices=REGISTRATION_METHOD_CHOICES, default=BLANK + ) + collect_individual_data = models.CharField( + max_length=250, choices=YES_NO_CHOICE, default=BLANK + ) currency = models.CharField(max_length=250, choices=CURRENCY_CHOICES, default=BLANK) unhcr_id = models.CharField(max_length=250, blank=True, default=BLANK) kobo_submission_uuid = models.UUIDField(null=True, default=None) @@ -164,7 +192,9 @@ class ImportedIndividual(TimeStampedUUIDModel): related_name="individuals", on_delete=models.CASCADE, ) - disability = models.CharField(max_length=20, choices=DISABILITY_CHOICES, default=NOT_DISABLED) + disability = models.CharField( + max_length=20, choices=DISABILITY_CHOICES, default=NOT_DISABLED + ) work_status = models.CharField( max_length=20, choices=WORK_STATUS_CHOICE, @@ -188,14 +218,26 @@ class ImportedIndividual(TimeStampedUUIDModel): deduplication_batch_results = JSONField(default=dict) deduplication_golden_record_results = JSONField(default=dict) flex_fields = JSONField(default=dict) - pregnant = models.NullBooleanField() + pregnant = models.BooleanField(null=True) observed_disability = MultiSelectField(choices=OBSERVED_DISABILITY_CHOICE) - seeing_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - hearing_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - physical_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - memory_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - selfcare_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) - comms_disability = models.CharField(max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True) + seeing_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + hearing_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + physical_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + memory_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + selfcare_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) + comms_disability = models.CharField( + max_length=50, choices=SEVERITY_OF_DISABILITY_CHOICES, blank=True + ) who_answers_phone = models.CharField(max_length=150, blank=True) who_answers_alt_phone = models.CharField(max_length=150, blank=True) kobo_asset_id = models.CharField(max_length=150, blank=True, 
default=BLANK) @@ -277,7 +319,9 @@ class RegistrationDataImportDatahub(TimeStampedUUIDModel): on_delete=models.CASCADE, null=True, ) - import_done = models.CharField(max_length=15, choices=IMPORT_DONE_CHOICES, default=NOT_STARTED) + import_done = models.CharField( + max_length=15, choices=IMPORT_DONE_CHOICES, default=NOT_STARTED + ) business_area_slug = models.CharField(max_length=250, blank=True) class Meta: @@ -329,7 +373,9 @@ def __str__(self): class ImportedDocument(TimeStampedUUIDModel): document_number = models.CharField(max_length=255, blank=True) photo = models.ImageField(blank=True) - individual = models.ForeignKey("ImportedIndividual", related_name="documents", on_delete=models.CASCADE) + individual = models.ForeignKey( + "ImportedIndividual", related_name="documents", on_delete=models.CASCADE + ) type = models.ForeignKey( "ImportedDocumentType", related_name="documents", @@ -362,7 +408,9 @@ def __str__(self): class ImportedIndividualIdentity(models.Model): - agency = models.ForeignKey("ImportedAgency", related_name="identities", on_delete=models.CASCADE) + agency = models.ForeignKey( + "ImportedAgency", related_name="identities", on_delete=models.CASCADE + ) individual = models.ForeignKey( "ImportedIndividual", related_name="identities", @@ -377,13 +425,17 @@ def __str__(self): class KoboImportedSubmission(models.Model): - created_at = models.DateTimeField(auto_now_add=True, db_index=True, null=True, blank=True) + created_at = models.DateTimeField( + auto_now_add=True, db_index=True, null=True, blank=True + ) kobo_submission_uuid = models.UUIDField() kobo_asset_id = models.CharField(max_length=150) kobo_submission_time = models.DateTimeField() # we use on_delete=models.SET_NULL because we want to be able to delete # ImportedHousehold without loosing track of importing - imported_household = models.ForeignKey(ImportedHousehold, blank=True, null=True, on_delete=models.SET_NULL) + imported_household = models.ForeignKey( + ImportedHousehold, blank=True, null=True, on_delete=models.SET_NULL + ) amended = models.BooleanField(default=False, blank=True) registration_data_import = models.ForeignKey( diff --git a/backend/hct_mis_api/apps/registration_datahub/template_generator.py b/backend/hct_mis_api/apps/registration_datahub/template_generator.py index 11a86b3778..dd5dc7cadd 100644 --- a/backend/hct_mis_api/apps/registration_datahub/template_generator.py +++ b/backend/hct_mis_api/apps/registration_datahub/template_generator.py @@ -1,8 +1,11 @@ -from typing import List, Tuple, Dict +from typing import Dict, List, Tuple import openpyxl -from hct_mis_api.apps.core.core_fields_attributes import CORE_FIELDS_SEPARATED_WITH_NAME_AS_KEY, COLLECTORS_FIELDS +from hct_mis_api.apps.core.core_fields_attributes import ( + COLLECTORS_FIELDS, + CORE_FIELDS_SEPARATED_WITH_NAME_AS_KEY, +) from hct_mis_api.apps.core.models import AdminArea from hct_mis_api.apps.core.utils import serialize_flex_attributes @@ -19,8 +22,10 @@ def _create_workbook(cls) -> openpyxl.Workbook: return wb @classmethod - def _handle_choices(cls, fields: Dict) -> List[List[str]]: - rows: List[List[str]] = [["Field Name", "Label", "Value to be used in template"]] + def _handle_choices(cls, fields: dict) -> list[list[str]]: + rows: list[list[str]] = [ + ["Field Name", "Label", "Value to be used in template"] + ] for field_name, field_value in fields.items(): is_admin_level = field_name in ("admin1_h_c", "admin2_h_c") @@ -29,15 +34,19 @@ def _handle_choices(cls, fields: Dict) -> List[List[str]]: choices = 
AdminArea.get_admin_areas_as_choices(field_name[-5]) if choices: for choice in field_value["choices"]: - row = [field_name, str(choice["label"]["English(EN)"]), choice["value"]] + row = [ + field_name, + str(choice["label"]["English(EN)"]), + choice["value"], + ] rows.append(row) return rows @classmethod - def _handle_name_and_label_row(cls, fields: Dict) -> Tuple[List[str], List[str]]: - names: List[str] = [] - labels: List[str] = [] + def _handle_name_and_label_row(cls, fields: dict) -> tuple[list[str], list[str]]: + names: list[str] = [] + labels: list[str] = [] for field_name, field_value in fields.items(): names.append(field_name) diff --git a/backend/hct_mis_api/apps/registration_datahub/validators.py b/backend/hct_mis_api/apps/registration_datahub/validators.py index a7bd336299..ec8d5f362d 100644 --- a/backend/hct_mis_api/apps/registration_datahub/validators.py +++ b/backend/hct_mis_api/apps/registration_datahub/validators.py @@ -52,7 +52,9 @@ class XLSXValidator(BaseValidator): @classmethod def validate(cls, *args, **kwargs): try: - validate_methods = [getattr(cls, m) for m in dir(cls) if m.startswith("validate_")] + validate_methods = [ + getattr(cls, m) for m in dir(cls) if m.startswith("validate_") + ] errors_list = [] for method in validate_methods: @@ -85,7 +87,13 @@ def validate_file_extension(cls, *args, **kwargs): try: load_workbook(xlsx_file, data_only=True) except BadZipfile: - return [{"row_number": 1, "header": f"{xlsx_file.name}", "message": "Invalid .xlsx file"}] + return [ + { + "row_number": 1, + "header": f"{xlsx_file.name}", + "message": "Invalid .xlsx file", + } + ] return [] except Exception as e: @@ -110,7 +118,9 @@ class ImportDataValidator(BaseValidator): @classmethod def validate(cls, *args, **kwargs): try: - validate_methods = [getattr(cls, m) for m in dir(cls) if m.startswith("validate_")] + validate_methods = [ + getattr(cls, m) for m in dir(cls) if m.startswith("validate_") + ] errors_list = [] for method in validate_methods: @@ -136,7 +146,10 @@ def documents_validator(cls, documents_numbers_dict, is_xlsx=True, *args, **kwar issuing_countries = [None] * len(values["validation_data"]) if key == "other_id_type_i_c": for name, value, validation_data, issuing_country in zip( - values["names"], values["numbers"], values["validation_data"], issuing_countries + values["names"], + values["numbers"], + values["validation_data"], + issuing_countries, ): row_number = validation_data.get("row_number") if not name and value: @@ -169,7 +182,9 @@ def documents_validator(cls, documents_numbers_dict, is_xlsx=True, *args, **kwar values["validation_data"], values["numbers"], issuing_countries ): row_number = ( - validation_data.get("row_number") if isinstance(validation_data, dict) else validation_data + validation_data.get("row_number") + if isinstance(validation_data, dict) + else validation_data ) if value and not issuing_country: error = { @@ -204,7 +219,11 @@ def identity_validator(cls, identities_numbers_dict, is_xlsx=True, *args, **kwar for data_dict, value, issuing_country in zip_longest( values["validation_data"], values["numbers"], issuing_countries ): - row_number = data_dict.get("row_number") if isinstance(data_dict, dict) else data_dict + row_number = ( + data_dict.get("row_number") + if isinstance(data_dict, dict) + else data_dict + ) if not value and not issuing_country: continue elif value and not issuing_country: @@ -245,7 +264,9 @@ class ImportDataInstanceValidator: "unhcr_id_issuer_i_c": "unhcr_id_no_i_c", } - def documents_validator(self, 
documents_numbers_dict, is_xlsx=True, *args, **kwargs): + def documents_validator( + self, documents_numbers_dict, is_xlsx=True, *args, **kwargs + ): try: invalid_rows = [] for key, values in documents_numbers_dict.items(): @@ -256,7 +277,10 @@ def documents_validator(self, documents_numbers_dict, is_xlsx=True, *args, **kwa issuing_countries = [None] * len(values["validation_data"]) if key == "other_id_type_i_c": for name, value, validation_data, issuing_country in zip( - values["names"], values["numbers"], values["validation_data"], issuing_countries + values["names"], + values["numbers"], + values["validation_data"], + issuing_countries, ): row_number = validation_data.get("row_number") if not name and value: @@ -289,7 +313,9 @@ def documents_validator(self, documents_numbers_dict, is_xlsx=True, *args, **kwa values["validation_data"], values["numbers"], issuing_countries ): row_number = ( - validation_data.get("row_number") if isinstance(validation_data, dict) else validation_data + validation_data.get("row_number") + if isinstance(validation_data, dict) + else validation_data ) if value and not issuing_country: error = { @@ -313,7 +339,9 @@ def documents_validator(self, documents_numbers_dict, is_xlsx=True, *args, **kwa logger.exception(e) raise - def identity_validator(self, identities_numbers_dict, is_xlsx=True, *args, **kwargs): + def identity_validator( + self, identities_numbers_dict, is_xlsx=True, *args, **kwargs + ): try: invalid_rows = [] for key, values in identities_numbers_dict.items(): @@ -323,7 +351,11 @@ def identity_validator(self, identities_numbers_dict, is_xlsx=True, *args, **kwa for data_dict, value, issuing_country in zip_longest( values["validation_data"], values["numbers"], issuing_countries ): - row_number = data_dict.get("row_number") if isinstance(data_dict, dict) else data_dict + row_number = ( + data_dict.get("row_number") + if isinstance(data_dict, dict) + else data_dict + ) if not value and not issuing_country: continue elif value and not issuing_country: @@ -527,7 +559,9 @@ def bool_validator(self, value, header, *args, **kwargs): try: if isinstance(value, bool): return True - if self.all_fields[header]["required"] is False and (value is None or value == ""): + if self.all_fields[header]["required"] is False and ( + value is None or value == "" + ): return True if type(value) is str: value = value.capitalize() @@ -671,53 +705,95 @@ def rows_validator(self, sheet): field_type = current_field["type"] fn = switch_dict.get(field_type) - if fn(value, header.value, cell) is False and household_id_can_be_empty is False: + if ( + fn(value, header.value, cell) is False + and household_id_can_be_empty is False + ): message = ( f"Sheet: {sheet.title}, Unexpected value: " f"{value} for type " f"{field_type.replace('_', ' ').lower()} " f"of field {header.value}" ) - invalid_rows.append({"row_number": cell.row, "header": header.value, "message": message}) + invalid_rows.append( + { + "row_number": cell.row, + "header": header.value, + "message": message, + } + ) if header.value in documents_numbers: if header.value == "other_id_type_i_c": - documents_numbers["other_id_type_i_c"]["names"].append(value) + documents_numbers["other_id_type_i_c"]["names"].append( + value + ) elif header.value == "other_id_no_i_c": - documents_numbers["other_id_type_i_c"]["numbers"].append(str(value) if value else None) + documents_numbers["other_id_type_i_c"]["numbers"].append( + str(value) if value else None + ) else: - documents_numbers[header.value]["numbers"].append(str(value) if value 
else None) + documents_numbers[header.value]["numbers"].append( + str(value) if value else None + ) if header.value in self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.keys(): - document_key = self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.get(header.value) + document_key = self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.get( + header.value + ) documents_dict = documents_numbers if document_key in identities_numbers.keys(): documents_dict = identities_numbers documents_dict[document_key]["issuing_countries"].append(value) if header.value in identities_numbers: - identities_numbers[header.value]["numbers"].append(str(value) if value else None) + identities_numbers[header.value]["numbers"].append( + str(value) if value else None + ) - if current_household_id and current_household_id not in self.household_ids: + if ( + current_household_id + and current_household_id not in self.household_ids + ): message = f"Sheet: Individuals, There is no household with provided id: {current_household_id}" - invalid_rows.append({"row_number": row_number, "header": "relationship_i_c", "message": message}) + invalid_rows.append( + { + "row_number": row_number, + "header": "relationship_i_c", + "message": message, + } + ) for header in self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.values(): documents_or_identity_dict = ( - identities_numbers if header in identities_numbers.keys() else documents_numbers + identities_numbers + if header in identities_numbers.keys() + else documents_numbers + ) + documents_or_identity_dict[header]["validation_data"].append( + {"row_number": row[0].row} ) - documents_or_identity_dict[header]["validation_data"].append({"row_number": row[0].row}) if sheet.title == "Individuals": for household_id, count in self.head_of_household_count.items(): if count == 0: - message = ( - f"Sheet: Individuals, Household with id: {household_id}, has to have a head of household" + message = f"Sheet: Individuals, Household with id: {household_id}, has to have a head of household" + invalid_rows.append( + { + "row_number": 0, + "header": "relationship_i_c", + "message": message, + } ) - invalid_rows.append({"row_number": 0, "header": "relationship_i_c", "message": message}) elif count > 1: message = f"Sheet: Individuals, There are multiple head of households for household with id: {household_id}" - invalid_rows.append({"row_number": 0, "header": "relationship_i_c", "message": message}) + invalid_rows.append( + { + "row_number": 0, + "header": "relationship_i_c", + "message": message, + } + ) invalid_doc_rows = [] invalid_ident_rows = [] @@ -743,9 +819,13 @@ def validate_file_with_template(self, wb): sheet = wb[name.capitalize()] first_row = sheet[1] - all_fields = list(fields.values()) + list(self.flex_fields[name].values()) + all_fields = list(fields.values()) + list( + self.flex_fields[name].values() + ) - required_fields = set(field["xlsx_field"] for field in all_fields if field["required"]) + required_fields = { + field["xlsx_field"] for field in all_fields if field["required"] + } column_names = {cell.value for cell in first_row} @@ -754,7 +834,11 @@ def validate_file_with_template(self, wb): if columns_difference: errors.extend( [ - {"row_number": 1, "header": col, "message": f"Missing column name {col}"} + { + "row_number": 1, + "header": col, + "message": f"Missing column name {col}", + } for col in columns_difference ] ) @@ -788,7 +872,13 @@ def validate_everything(self, xlsx_file, business_area_slug): try: wb = openpyxl.load_workbook(xlsx_file, data_only=True) except BadZipfile: - return [{"row_number": 1, "header": 
f"{xlsx_file.name}", "message": "Invalid .xlsx file"}] + return [ + { + "row_number": 1, + "header": f"{xlsx_file.name}", + "message": "Invalid .xlsx file", + } + ] errors.extend(self.validate_file_with_template(wb)) errors.extend(self.validate_collectors_size(wb)) errors.extend(self.validate_collectors(wb)) @@ -827,7 +917,9 @@ def collector_column_validator(header, data_dict, household_ids): collectors_ids_set = set(collectors_ids) if is_primary_collector: - household_ids_without_collectors = household_ids.difference(collectors_ids_set) + household_ids_without_collectors = household_ids.difference( + collectors_ids_set + ) errors.extend( { "row_number": 1, @@ -838,10 +930,18 @@ def collector_column_validator(header, data_dict, household_ids): ) ids_counter = Counter(collectors_ids) - erroneous_collectors_ids = [item for item, count in ids_counter.items() if count > 1] - message = "Household can contain only one primary and one alternate collector" + erroneous_collectors_ids = [ + item for item, count in ids_counter.items() if count > 1 + ] + message = ( + "Household can contain only one primary and one alternate collector" + ) errors.extend( - {"row_number": 1, "header": header, "message": f"{message}, erroneous id: {hh_id}"} + { + "row_number": 1, + "header": header, + "message": f"{message}, erroneous id: {hh_id}", + } for hh_id in erroneous_collectors_ids ) return errors @@ -858,7 +958,9 @@ def validate_collectors(self, wb): first_row = individuals_sheet[1] household_ids = { - str(int(cell.value)) if isinstance(cell.value, float) and cell.value.is_integer() else str(cell.value) + str(int(cell.value)) + if isinstance(cell.value, float) and cell.value.is_integer() + else str(cell.value) for cell in households_sheet["A"][2:] if cell.value } @@ -867,12 +969,22 @@ def validate_collectors(self, wb): alternate_collectors_data = {} for cell in first_row: if cell.value == "primary_collector_id": - primary_collectors_data = {c.row: c for c in individuals_sheet[cell.column_letter][2:] if c.value} + primary_collectors_data = { + c.row: c + for c in individuals_sheet[cell.column_letter][2:] + if c.value + } elif cell.value == "alternate_collector_id": - alternate_collectors_data = {c.row: c for c in individuals_sheet[cell.column_letter][2:] if c.value} + alternate_collectors_data = { + c.row: c + for c in individuals_sheet[cell.column_letter][2:] + if c.value + } errors.extend( - self.collector_column_validator("primary_collector_id", primary_collectors_data, household_ids) + self.collector_column_validator( + "primary_collector_id", primary_collectors_data, household_ids + ) ) errors.extend( self.collector_column_validator( @@ -944,15 +1056,23 @@ def __init__(self): self.flex_fields = self.get_flex_fields() self.all_fields = self.get_all_fields() self.expected_household_core_fields = self.get_expected_household_core_fields() - self.expected_households_flex_fields = self.get_expected_households_flex_fields() - self.expected_individuals_core_fields = self.get_expected_individuals_core_fields() - self.expected_individuals_flex_fields = self.get_expected_individuals_flex_fields() + self.expected_households_flex_fields = ( + self.get_expected_households_flex_fields() + ) + self.expected_individuals_core_fields = ( + self.get_expected_individuals_core_fields() + ) + self.expected_individuals_flex_fields = ( + self.get_expected_individuals_flex_fields() + ) self.expected_household_fields = self.get_expected_household_fields() self.expected_individuals_fields = self.get_expected_individuals_fields() def 
get_core_fields(self): try: - return core_fields_to_separated_dict(append_household_id=False, append_xlsx=False) + return core_fields_to_separated_dict( + append_household_id=False, append_xlsx=False + ) except Exception as e: logger.exception(e) raise @@ -973,42 +1093,62 @@ def get_all_fields(self): def get_expected_household_core_fields(self): try: - return {field["xlsx_field"] for field in self.core_fields["households"].values() if field["required"]} + return { + field["xlsx_field"] + for field in self.core_fields["households"].values() + if field["required"] + } except Exception as e: logger.exception(e) raise def get_expected_households_flex_fields(self): try: - return {field["xlsx_field"] for field in self.flex_fields["households"].values() if field["required"]} + return { + field["xlsx_field"] + for field in self.flex_fields["households"].values() + if field["required"] + } except Exception as e: logger.exception(e) raise def get_expected_individuals_core_fields(self): try: - return {field["xlsx_field"] for field in self.core_fields["individuals"].values() if field["required"]} + return { + field["xlsx_field"] + for field in self.core_fields["individuals"].values() + if field["required"] + } except Exception as e: logger.exception(e) raise def get_expected_individuals_flex_fields(self): try: - return {field["xlsx_field"] for field in self.flex_fields["individuals"].values() if field["required"]} + return { + field["xlsx_field"] + for field in self.flex_fields["individuals"].values() + if field["required"] + } except Exception as e: logger.exception(e) raise def get_expected_household_fields(self): try: - return self.expected_household_core_fields.union(self.expected_households_flex_fields) + return self.expected_household_core_fields.union( + self.expected_households_flex_fields + ) except Exception as e: logger.exception(e) raise def get_expected_individuals_fields(self): try: - return self.expected_individuals_core_fields.union(self.expected_individuals_flex_fields) + return self.expected_individuals_core_fields.union( + self.expected_individuals_flex_fields + ) except Exception as e: logger.exception(e) raise @@ -1022,7 +1162,10 @@ def standard_type_validator(self, value: str, field: str, field_type: str): int(value) return except Exception as e: - return f"Invalid value {value} of type {value_type_name} for " f"field {field} of type int" + return ( + f"Invalid value {value} of type {value_type_name} for " + f"field {field} of type int" + ) elif field_type == "STRING": # everything from Kobo is string so cannot really validate it # only check phone number @@ -1039,18 +1182,26 @@ def standard_type_validator(self, value: str, field: str, field_type: str): # to no not break import if they start returning integers if value in ("True", "False", True, False, "0", "1"): return None - return f"Invalid value {value} of type {value_type_name} for " f"field {field} of type bool" + return ( + f"Invalid value {value} of type {value_type_name} for " + f"field {field} of type bool" + ) except Exception as e: logger.exception(e) raise - def image_validator(self, value: str, field: str, attachments: List[dict], *args, **kwargs) -> Union[str, None]: + def image_validator( + self, value: str, field: str, attachments: list[dict], *args, **kwargs + ) -> Union[str, None]: try: allowed_extensions = django_core_validators.get_available_image_extensions() file_extension = value.split(".")[-1] if file_extension.lower() not in allowed_extensions: - message = f"Specified image {value} for " f"field {field} 
is not a valid image file" + message = ( + f"Specified image {value} for " + f"field {field} is not a valid image file" + ) return message message = f"Specified image {value} for field {field} is not in attachments" @@ -1069,7 +1220,9 @@ def image_validator(self, value: str, field: str, attachments: List[dict], *args logger.exception(e) raise - def geopoint_validator(self, value: str, field: str, *args, **kwargs) -> Union[str, None]: + def geopoint_validator( + self, value: str, field: str, *args, **kwargs + ) -> Union[str, None]: message = f"Invalid geopoint {value} for field {field}" if not value or not isinstance(value, str): @@ -1083,7 +1236,9 @@ def geopoint_validator(self, value: str, field: str, *args, **kwargs) -> Union[s return None if is_valid_geopoint else message - def date_validator(self, value: str, field: str, *args, **kwargs) -> Union[str, None]: + def date_validator( + self, value: str, field: str, *args, **kwargs + ) -> Union[str, None]: try: message = ( f"Invalid datetime/date {value} for field {field}, " @@ -1103,7 +1258,9 @@ def date_validator(self, value: str, field: str, *args, **kwargs) -> Union[str, matched = re.match(pattern_iso, value) if matched is None: - pattern_date = re.compile(r"([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))") + pattern_date = re.compile( + r"([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))" + ) matched = re.match(pattern_date, value) @@ -1112,7 +1269,9 @@ def date_validator(self, value: str, field: str, *args, **kwargs) -> Union[str, logger.exception(e) raise - def choice_validator(self, value: str, field: str, *args, **kwargs) -> Union[str, None]: + def choice_validator( + self, value: str, field: str, *args, **kwargs + ) -> Union[str, None]: try: message = f"Invalid choice {value} for field {field}" @@ -1127,7 +1286,11 @@ def choice_validator(self, value: str, field: str, *args, **kwargs) -> Union[str if choice_type == TYPE_SELECT_ONE: if custom_validate_choices_method is not None: - return None if custom_validate_choices_method(value) is True else message + return ( + None + if custom_validate_choices_method(value) is True + else message + ) is_in_choices = value in choices if is_in_choices is False: @@ -1146,7 +1309,11 @@ def choice_validator(self, value: str, field: str, *args, **kwargs) -> Union[str selected_choices = str_value.split(" ") if custom_validate_choices_method is not None: - return None if custom_validate_choices_method(str_value) is True else message + return ( + None + if custom_validate_choices_method(str_value) is True + else message + ) for choice in selected_choices: choice = choice.strip() @@ -1157,7 +1324,9 @@ def choice_validator(self, value: str, field: str, *args, **kwargs) -> Union[str logger.exception(e) raise - def _get_field_type_error(self, field: str, value: Union[str, list], attachments: list) -> Union[dict, None]: + def _get_field_type_error( + self, field: str, value: Union[str, list], attachments: list + ) -> Union[dict, None]: try: field_dict = self.all_fields.get(field) if field_dict is None: @@ -1174,7 +1343,9 @@ def _get_field_type_error(self, field: str, value: Union[str, list], attachments complex_type_fn = complex_types.get(field_type) if complex_type_fn: - message = complex_type_fn(field=field, value=value, attachments=attachments) + message = complex_type_fn( + field=field, value=value, attachments=attachments + ) if message is not None: return { "header": field, @@ -1199,8 +1370,18 @@ def validate_everything(self, submissions: list, business_area: BusinessArea): # have fun debugging this 
;_; identities_numbers = { - "unhcr_id_no_i_c": {"agency": "UNHCR", "validation_data": [], "numbers": [], "issuing_countries": []}, - "scope_id_no_i_c": {"agency": "WFP", "validation_data": [], "numbers": [], "issuing_countries": []}, + "unhcr_id_no_i_c": { + "agency": "UNHCR", + "validation_data": [], + "numbers": [], + "issuing_countries": [], + }, + "scope_id_no_i_c": { + "agency": "WFP", + "validation_data": [], + "numbers": [], + "issuing_countries": [], + }, } documents_numbers = { "birth_certificate_no_i_c": { @@ -1249,13 +1430,18 @@ def validate_everything(self, submissions: list, business_area: BusinessArea): if business_area.get_sys_option("ignore_amended_kobo_submissions"): submission_meta_data["amended"] = False - submission_exists = KoboImportedSubmission.objects.filter(**submission_meta_data).exists() + submission_exists = KoboImportedSubmission.objects.filter( + **submission_meta_data + ).exists() if submission_exists is True: continue head_of_hh_counter = 0 primary_collector_counter = 0 alternate_collector_counter = 0 - expected_hh_fields = {*self.expected_household_fields, *KOBO_ONLY_HOUSEHOLD_FIELDS.keys()} + expected_hh_fields = { + *self.expected_household_fields, + *KOBO_ONLY_HOUSEHOLD_FIELDS.keys(), + } attachments = household.get("_attachments", []) for hh_field, hh_value in household.items(): expected_hh_fields.discard(hh_field) @@ -1269,27 +1455,51 @@ def validate_everything(self, submissions: list, business_area: BusinessArea): for i_field, i_value in individual.items(): if i_field in documents_numbers: if i_field == "other_id_type_i_c": - documents_numbers["other_id_type_i_c"]["names"].append(i_value) + documents_numbers["other_id_type_i_c"][ + "names" + ].append(i_value) elif i_field == "other_id_no_i_c": - documents_numbers["other_id_type_i_c"]["validation_data"].append( - {"value": i_value} - ) - documents_numbers["other_id_type_i_c"]["numbers"].append(i_value) + documents_numbers["other_id_type_i_c"][ + "validation_data" + ].append({"value": i_value}) + documents_numbers["other_id_type_i_c"][ + "numbers" + ].append(i_value) else: - documents_numbers[i_field]["validation_data"].append({"value": i_value}) - documents_numbers[i_field]["numbers"].append(i_value) - if i_field in self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.keys(): - document_key = self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.get(i_field) + documents_numbers[i_field][ + "validation_data" + ].append({"value": i_value}) + documents_numbers[i_field]["numbers"].append( + i_value + ) + if ( + i_field + in self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.keys() + ): + document_key = ( + self.DOCUMENTS_ISSUING_COUNTRIES_MAPPING.get( + i_field + ) + ) documents_dict = documents_numbers if document_key in identities_numbers.keys(): documents_dict = identities_numbers - documents_dict[document_key]["issuing_countries"].append(i_value) + documents_dict[document_key][ + "issuing_countries" + ].append(i_value) if i_field in identities_numbers: - identities_numbers[i_field]["numbers"].append(i_value) - identities_numbers[i_field]["validation_data"].append({"value": i_value}) - - if i_field == "relationship_i_c" and i_value.upper() == "HEAD": + identities_numbers[i_field]["numbers"].append( + i_value + ) + identities_numbers[i_field][ + "validation_data" + ].append({"value": i_value}) + + if ( + i_field == "relationship_i_c" + and i_value.upper() == "HEAD" + ): head_of_hh_counter += 1 if i_field == "role_i_c": role = i_value.upper() @@ -1299,14 +1509,22 @@ def validate_everything(self, submissions: list, business_area: 
BusinessArea): alternate_collector_counter += 1 expected_i_fields.discard(i_field) - error = self._get_field_type_error(i_field, i_value, attachments) + error = self._get_field_type_error( + i_field, i_value, attachments + ) if error: errors.append(error) - docs_and_identities_to_validate.append(current_individual_docs_and_identities) + docs_and_identities_to_validate.append( + current_individual_docs_and_identities + ) i_expected_field_errors = [ - {"header": field, "message": "Missing individual " f"required field {field}"} + { + "header": field, + "message": "Missing individual " + f"required field {field}", + } for field in expected_i_fields ] errors.extend(i_expected_field_errors) @@ -1315,40 +1533,61 @@ def validate_everything(self, submissions: list, business_area: BusinessArea): errors.append( { "header": "relationship_i_c", - "message": "Household has to have a " "head of household", + "message": "Household has to have a " + "head of household", } ) if head_of_hh_counter > 1: errors.append( { "header": "relationship_i_c", - "message": "Only one person can " "be a head of household", + "message": "Only one person can " + "be a head of household", } ) if primary_collector_counter == 0: errors.append( - {"header": "role_i_c", "message": "Household must have a " "primary collector"} + { + "header": "role_i_c", + "message": "Household must have a " + "primary collector", + } ) if primary_collector_counter > 1: errors.append( - {"header": "role_i_c", "message": "Only one person can " "be a primary collector"} + { + "header": "role_i_c", + "message": "Only one person can " + "be a primary collector", + } ) if alternate_collector_counter > 1: errors.append( - {"header": "role_i_c", "message": "Only one person can " "be a alternate collector"} + { + "header": "role_i_c", + "message": "Only one person can " + "be a alternate collector", + } ) else: - error = self._get_field_type_error(hh_field, hh_value, attachments) + error = self._get_field_type_error( + hh_field, hh_value, attachments + ) if error: errors.append(error) hh_expected_field_errors = [ - {"header": field, "message": f"Missing household required field {field}"} + { + "header": field, + "message": f"Missing household required field {field}", + } for field in expected_hh_fields ] errors.extend(hh_expected_field_errors) document_errors = self.documents_validator(documents_numbers, is_xlsx=False) - identities_errors = self.identity_validator(identities_numbers, is_xlsx=False) + identities_errors = self.identity_validator( + identities_numbers, is_xlsx=False + ) return [*errors, *document_errors, *identities_errors] except Exception as e: diff --git a/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py b/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py index 56858fc3f2..c5ef9042bb 100644 --- a/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py +++ b/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py @@ -43,9 +43,10 @@ def get_beneficiaries(cls, report: DashboardReport): individual_count_fields = cls._get_all_individual_count_fields() valid_payment_records = cls._get_payment_records_for_report(report) - instances, valid_payment_records_in_instance_filter_key = cls._get_business_areas_or_programs( - report, valid_payment_records - ) + ( + instances, + valid_payment_records_in_instance_filter_key, + ) = cls._get_business_areas_or_programs(report, valid_payment_records) for instance in instances: valid_payment_records_in_instance = 
valid_payment_records.filter( @@ -58,17 +59,29 @@ def get_beneficiaries(cls, report: DashboardReport): valid_households, individual_count_fields, ) - instance["total_children"] = cls._reduce_aggregate(households_aggr, children_count_fields) - instance["total_individuals"] = cls._reduce_aggregate(households_aggr, individual_count_fields) + instance["total_children"] = cls._reduce_aggregate( + households_aggr, children_count_fields + ) + instance["total_individuals"] = cls._reduce_aggregate( + households_aggr, individual_count_fields + ) instance["num_households"] = valid_households.count() # get total distincts (can't use the sum of column since some households might belong to multiple programs) - households = Household.objects.filter(payment_records__in=valid_payment_records).distinct() - households_aggr = cls._aggregate_instances_sum(households, individual_count_fields) + households = Household.objects.filter( + payment_records__in=valid_payment_records + ).distinct() + households_aggr = cls._aggregate_instances_sum( + households, individual_count_fields + ) totals = { "num_households": households.count(), - "total_individuals": cls._reduce_aggregate(households_aggr, individual_count_fields), - "total_children": cls._reduce_aggregate(households_aggr, children_count_fields), + "total_individuals": cls._reduce_aggregate( + households_aggr, individual_count_fields + ), + "total_children": cls._reduce_aggregate( + households_aggr, children_count_fields + ), } # return instances for rows and totals row info return instances, totals @@ -78,23 +91,29 @@ def get_individuals(cls, report: DashboardReport): valid_payment_records = cls._get_payment_records_for_report(report) individual_count_fields = cls._get_all_with_disabled_individual_count_fields() - instances, valid_payment_records_in_instance_filter_key = cls._get_business_areas_or_programs( - report, valid_payment_records - ) + ( + instances, + valid_payment_records_in_instance_filter_key, + ) = cls._get_business_areas_or_programs(report, valid_payment_records) for instance in instances: valid_payment_records_in_instance = valid_payment_records.filter( **{valid_payment_records_in_instance_filter_key: instance["id"]} ) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter(payment_records__in=valid_payment_records_in_instance).distinct(), + Household.objects.filter( + payment_records__in=valid_payment_records_in_instance + ).distinct(), individual_count_fields, ) instance.update(households_aggr) # get total distincts (can't use the sum of column since some households might belong to multiple programs) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter(payment_records__in=valid_payment_records).distinct(), individual_count_fields + Household.objects.filter( + payment_records__in=valid_payment_records + ).distinct(), + individual_count_fields, ) # return instances for rows and totals row info return instances, households_aggr @@ -103,9 +122,10 @@ def get_individuals(cls, report: DashboardReport): def get_volumes_by_delivery(cls, report: DashboardReport): valid_payment_records = cls._get_payment_records_for_report(report) - instances, valid_payment_records_in_instance_filter_key = cls._get_business_areas_or_programs( - report, valid_payment_records - ) + ( + instances, + valid_payment_records_in_instance_filter_key, + ) = cls._get_business_areas_or_programs(report, valid_payment_records) def aggregate_by_delivery_type(payment_records): result = dict() @@ -122,7 +142,9 @@ def 
aggregate_by_delivery_type(payment_records): valid_payment_records_in_instance = valid_payment_records.filter( **{valid_payment_records_in_instance_filter_key: instance["id"]} ) - aggregated_by_delivery_type = aggregate_by_delivery_type(valid_payment_records_in_instance) + aggregated_by_delivery_type = aggregate_by_delivery_type( + valid_payment_records_in_instance + ) instance.update(aggregated_by_delivery_type) totals = aggregate_by_delivery_type(valid_payment_records) @@ -168,12 +190,14 @@ def get_annotation(index_number: int, cash=True): .distinct() .annotate( successful_payments=Count( - "cash_plans__payment_records", filter=Q(cash_plans__payment_records__delivered_quantity_usd__gt=0) + "cash_plans__payment_records", + filter=Q(cash_plans__payment_records__delivered_quantity_usd__gt=0), ) ) .annotate( unsuccessful_payments=Count( - "cash_plans__payment_records", filter=Q(cash_plans__payment_records__delivered_quantity_usd=0) + "cash_plans__payment_records", + filter=Q(cash_plans__payment_records__delivered_quantity_usd=0), ) ) ) @@ -198,19 +222,32 @@ def get_grievances(cls, report: DashboardReport): days_30_from_now = datetime.date.today() - datetime.timedelta(days=30) days_60_from_now = datetime.date.today() - datetime.timedelta(days=60) - feedback_categories = [GrievanceTicket.CATEGORY_POSITIVE_FEEDBACK, GrievanceTicket.CATEGORY_NEGATIVE_FEEDBACK] + feedback_categories = [ + GrievanceTicket.CATEGORY_POSITIVE_FEEDBACK, + GrievanceTicket.CATEGORY_NEGATIVE_FEEDBACK, + ] status_closed_query = Q(tickets__status=GrievanceTicket.STATUS_CLOSED) status_open_query = ~Q(tickets__status=GrievanceTicket.STATUS_CLOSED) instances = ( BusinessArea.objects.filter(tickets__in=valid_grievances) .distinct() - .annotate(total_grievances=Count("tickets", filter=~Q(tickets__category__in=feedback_categories))) - .annotate(total_feedback=Count("tickets", filter=Q(tickets__category__in=feedback_categories))) + .annotate( + total_grievances=Count( + "tickets", filter=~Q(tickets__category__in=feedback_categories) + ) + ) + .annotate( + total_feedback=Count( + "tickets", filter=Q(tickets__category__in=feedback_categories) + ) + ) .annotate(total_resolved=Count("tickets", filter=status_closed_query)) .annotate( total_unresolved_lte_30=Count( "tickets", - filter=Q(status_open_query, tickets__created_at__gte=days_30_from_now), + filter=Q( + status_open_query, tickets__created_at__gte=days_30_from_now + ), ) ) .annotate( @@ -226,7 +263,9 @@ def get_grievances(cls, report: DashboardReport): .annotate( total_unresolved_60=Count( "tickets", - filter=Q(status_open_query, tickets__created_at__lt=days_60_from_now), + filter=Q( + status_open_query, tickets__created_at__lt=days_60_from_now + ), ) ) .annotate( @@ -262,15 +301,23 @@ def get_payment_verifications(cls, report: DashboardReport): if not cls._is_report_global(report): filter_vars["payment_record__business_area"] = report.business_area valid_verifications = PaymentVerification.objects.filter(**filter_vars) - path_to_payment_record_verifications = "cash_plans__verifications__payment_record_verifications" + path_to_payment_record_verifications = ( + "cash_plans__verifications__payment_record_verifications" + ) def format_status_filter(status): return Q(**{f"{path_to_payment_record_verifications}__status": status}) programs = ( - Program.objects.filter(**{f"{path_to_payment_record_verifications}__in": valid_verifications}) + Program.objects.filter( + **{f"{path_to_payment_record_verifications}__in": valid_verifications} + ) .distinct() - 
.annotate(total_cash_plan_verifications=Count("cash_plans__verifications", distinct=True)) + .annotate( + total_cash_plan_verifications=Count( + "cash_plans__verifications", distinct=True + ) + ) .annotate( total_households=Count( f"{path_to_payment_record_verifications}__payment_record__household", @@ -294,7 +341,11 @@ def format_status_filter(status): ), ) ) - .annotate(total_verifications_done=Count(path_to_payment_record_verifications, distinct=True)) + .annotate( + total_verifications_done=Count( + path_to_payment_record_verifications, distinct=True + ) + ) .annotate( received=Count( path_to_payment_record_verifications, @@ -305,14 +356,18 @@ def format_status_filter(status): .annotate( not_received=Count( path_to_payment_record_verifications, - filter=format_status_filter(PaymentVerification.STATUS_NOT_RECEIVED), + filter=format_status_filter( + PaymentVerification.STATUS_NOT_RECEIVED + ), distinct=True, ) ) .annotate( received_with_issues=Count( path_to_payment_record_verifications, - filter=format_status_filter(PaymentVerification.STATUS_RECEIVED_WITH_ISSUES), + filter=format_status_filter( + PaymentVerification.STATUS_RECEIVED_WITH_ISSUES + ), distinct=True, ) ) @@ -332,18 +387,23 @@ def get_total_transferred_by_country(cls, report: DashboardReport): # only for HQ dashboard business_areas = ( BusinessArea.objects.filter( - paymentrecord__delivered_quantity_usd__gt=0, paymentrecord__delivery_date__year=report.year + paymentrecord__delivered_quantity_usd__gt=0, + paymentrecord__delivery_date__year=report.year, ) .annotate( total_cash=Sum( "paymentrecord__delivered_quantity_usd", - filter=Q(paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH), + filter=Q( + paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH + ), ) ) .annotate( total_voucher=Sum( "paymentrecord__delivered_quantity_usd", - filter=Q(paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER), + filter=Q( + paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER + ), ) ) ) @@ -362,26 +422,38 @@ def get_total_transferred_by_admin_area(cls, report: DashboardReport): household__payment_records__in=valid_payment_records, ) .distinct() - .annotate(total_transferred=Sum("household__payment_records__delivered_quantity_usd")) + .annotate( + total_transferred=Sum( + "household__payment_records__delivered_quantity_usd" + ) + ) .annotate(num_households=Count("household", distinct=True)) ) totals = admin_areas.aggregate(Sum("total_transferred"), Sum("num_households")) - admin_areas = admin_areas.values("id", "title", "p_code", "num_households", "total_transferred") + admin_areas = admin_areas.values( + "id", "title", "p_code", "num_households", "total_transferred" + ) individual_count_fields = cls._get_all_individual_count_fields() for admin_area in admin_areas: - valid_payment_records_in_instance = valid_payment_records.filter(household__admin_area=admin_area["id"]) + valid_payment_records_in_instance = valid_payment_records.filter( + household__admin_area=admin_area["id"] + ) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter(payment_records__in=valid_payment_records_in_instance).distinct(), + Household.objects.filter( + payment_records__in=valid_payment_records_in_instance + ).distinct(), individual_count_fields, ) admin_area.update(households_aggr) totals.update( cls._aggregate_instances_sum( - Household.objects.filter(payment_records__in=valid_payment_records).distinct(), + Household.objects.filter( + 
payment_records__in=valid_payment_records + ).distinct(), individual_count_fields, ) ) @@ -434,13 +506,23 @@ def format_programs_row(cls, instance: Program, *args) -> tuple: ) months = cls.get_all_months() for month in months: - result += (getattr(instance, f"{month}_cash", 0), getattr(instance, f"{month}_voucher", 0)) + result += ( + getattr(instance, f"{month}_cash", 0), + getattr(instance, f"{month}_voucher", 0), + ) return result @staticmethod - def format_total_transferred_by_country(instance: BusinessArea, is_totals: bool, *args) -> tuple: + def format_total_transferred_by_country( + instance: BusinessArea, is_totals: bool, *args + ) -> tuple: if is_totals: - return "", "Total", instance.get("total_cash__sum") or 0, instance.get("total_voucher__sum") or 0 + return ( + "", + "Total", + instance.get("total_cash__sum") or 0, + instance.get("total_voucher__sum") or 0, + ) else: return ( instance.code, @@ -489,7 +571,13 @@ def format_payment_verifications_row(instance: Program, *args): instance.name, instance.total_cash_plan_verifications, instance.total_households, - round((instance.total_verifications_done / instance.all_possible_payment_records) * 100) + round( + ( + instance.total_verifications_done + / instance.all_possible_payment_records + ) + * 100 + ) if instance.total_payment_records else 0, instance.received, @@ -499,10 +587,14 @@ def format_payment_verifications_row(instance: Program, *args): ) @classmethod - def format_total_transferred_by_admin_area_row(cls, instance, is_totals: bool, *args): + def format_total_transferred_by_admin_area_row( + cls, instance, is_totals: bool, *args + ): fields_list = cls._get_all_individual_count_fields() - shared_cells = tuple([instance.get(f"{field_name}__sum", 0) for field_name in fields_list]) + shared_cells = tuple( + instance.get(f"{field_name}__sum", 0) for field_name in fields_list + ) if is_totals: return ( @@ -537,7 +629,9 @@ def _format_filters( if date_path: filter_vars.update({f"{date_path}__year": report.year}) if admin_area_path and report.admin_area: - filter_vars.update({admin_area_path: report.admin_area, f"{admin_area_path}__level": 2}) + filter_vars.update( + {admin_area_path: report.admin_area, f"{admin_area_path}__level": 2} + ) if program_path and report.program: filter_vars.update({program_path: report.program}) if not cls._is_report_global(report) and business_area_path: @@ -564,11 +658,15 @@ def _get_payment_records_for_report(self, report): def _get_business_areas_or_programs(cls, report, valid_payment_records): if cls._is_report_global(report): business_area_code_path = "code" - instances = BusinessArea.objects.filter(paymentrecord__in=valid_payment_records) + instances = BusinessArea.objects.filter( + paymentrecord__in=valid_payment_records + ) valid_payment_records_in_instance_filter_key = "business_area" else: business_area_code_path = "business_area__code" - instances = Program.objects.filter(cash_plans__payment_records__in=valid_payment_records) + instances = Program.objects.filter( + cash_plans__payment_records__in=valid_payment_records + ) valid_payment_records_in_instance_filter_key = "cash_plan__program" instances = ( @@ -586,7 +684,9 @@ def _aggregate_instances_sum(instances, field_list: list) -> dict: @staticmethod def _reduce_aggregate(aggregate: dict, fields_list: list) -> int: return functools.reduce( - lambda a, b: a + aggregate[f"{b}__sum"] if aggregate[f"{b}__sum"] else a, fields_list, 0 + lambda a, b: a + aggregate[f"{b}__sum"] if aggregate[f"{b}__sum"] else a, + fields_list, + 0, ) @staticmethod 
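    # A minimal worked example of _reduce_aggregate above (values are
    # hypothetical; the field names are borrowed from the household count
    # columns used elsewhere in this patch):
    #
    #     aggregate = {"female_age_group_0_5_count__sum": 3,
    #                  "male_age_group_0_5_count__sum": None}
    #     fields_list = ["female_age_group_0_5_count", "male_age_group_0_5_count"]
    #     _reduce_aggregate(aggregate, fields_list)  # -> 3 (None sums are skipped)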
@@ -710,7 +810,12 @@ class GenerateDashboardReportService: ), }, DashboardReport.TOTAL_TRANSFERRED_BY_COUNTRY: { - HQ: ("business area", "country", "actual cash transferred", "actual voucher transferred"), + HQ: ( + "business area", + "country", + "actual cash transferred", + "actual voucher transferred", + ), COUNTRY: (), SHARED: (), }, @@ -745,7 +850,7 @@ class GenerateDashboardReportService: "business area", "programme", ), - SHARED: tuple([choice[1] for choice in PaymentRecord.DELIVERY_TYPE_CHOICE]), + SHARED: tuple(choice[1] for choice in PaymentRecord.DELIVERY_TYPE_CHOICE), }, DashboardReport.INDIVIDUALS_REACHED: { HQ: ( @@ -814,9 +919,18 @@ class GenerateDashboardReportService: GenerateDashboardReportContentHelpers.format_total_transferred_by_admin_area_row, ), } - META_HEADERS = ("report type", "creation date", "created by", "business area", "report year") + META_HEADERS = ( + "report type", + "creation date", + "created by", + "business area", + "report year", + ) REMOVE_EMPTY_COLUMNS = { - DashboardReport.VOLUME_BY_DELIVERY_MECHANISM: (3, len(PaymentRecord.DELIVERY_TYPE_CHOICE) + 3) + DashboardReport.VOLUME_BY_DELIVERY_MECHANISM: ( + 3, + len(PaymentRecord.DELIVERY_TYPE_CHOICE) + 3, + ) } META_SHEET = "Meta data" MAX_COL_WIDTH = 75 @@ -825,7 +939,9 @@ def __init__(self, report: DashboardReport): self.report = report self.report_types = report.report_type self.business_area = report.business_area - self.hq_or_country = self.HQ if report.business_area.slug == "global" else self.COUNTRY + self.hq_or_country = ( + self.HQ if report.business_area.slug == "global" else self.COUNTRY + ) def _create_workbook(self) -> openpyxl.Workbook: wb = openpyxl.Workbook() @@ -847,7 +963,10 @@ def _format_meta_tab(self): self.ws_meta.append(info_row) def _add_headers(self, active_sheet, report_type) -> int: - headers_row = self.HEADERS[report_type][self.hq_or_country] + self.HEADERS[report_type][self.SHARED] + headers_row = ( + self.HEADERS[report_type][self.hq_or_country] + + self.HEADERS[report_type][self.SHARED] + ) headers_row = self._stringify_all_values(headers_row) active_sheet.append(headers_row) return len(headers_row) @@ -884,7 +1003,10 @@ def generate_workbook(self) -> openpyxl.Workbook: remove_empty_columns_values = self.REMOVE_EMPTY_COLUMNS.get(report_type) if remove_empty_columns_values: self._remove_empty_columns( - active_sheet, number_of_rows + 2, remove_empty_columns_values[0], remove_empty_columns_values[1] + active_sheet, + number_of_rows + 2, + remove_empty_columns_values[0], + remove_empty_columns_values[1], ) self._adjust_column_width_from_col(active_sheet, 1, number_of_columns, 1) @@ -894,7 +1016,9 @@ def generate_report(self): try: self.generate_workbook() file_name = ( - self._report_type_to_str(self.report_types[0]) if len(self.report_types) == 1 else "Multiple reports" + self._report_type_to_str(self.report_types[0]) + if len(self.report_types) == 1 + else "Multiple reports" ) self.report.file.save( f"{file_name}-{self._format_date(self.report.created_at)}.xlsx", @@ -932,7 +1056,9 @@ def _send_email(self): @staticmethod def _adjust_column_width_from_col(ws, min_col, max_col, min_row): column_widths = [] - for i, col in enumerate(ws.iter_cols(min_col=min_col, max_col=max_col, min_row=min_row)): + for i, col in enumerate( + ws.iter_cols(min_col=min_col, max_col=max_col, min_row=min_row) + ): for cell in col: value = cell.value if value is not None: @@ -972,7 +1098,9 @@ def _report_type_to_str(report_type) -> str: return label[:31] def _report_types_to_joined_str(self) -> 
str: - return ", ".join([self._report_type_to_str(report_type) for report_type in self.report_types]) + return ", ".join( + [self._report_type_to_str(report_type) for report_type in self.report_types] + ) @staticmethod def _stringify_all_values(row: tuple) -> tuple: @@ -991,7 +1119,9 @@ def _format_date(date) -> str: @staticmethod def _format_user_name(user: User) -> str: return ( - f"{user.first_name} {user.last_name}" if user.first_name or user.last_name else user.email or user.username + f"{user.first_name} {user.last_name}" + if user.first_name or user.last_name + else user.email or user.username ) @staticmethod diff --git a/backend/hct_mis_api/apps/reporting/models.py b/backend/hct_mis_api/apps/reporting/models.py index fed8f6d914..949ddbba34 100644 --- a/backend/hct_mis_api/apps/reporting/models.py +++ b/backend/hct_mis_api/apps/reporting/models.py @@ -1,7 +1,7 @@ from datetime import datetime from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from hct_mis_api.apps.account.models import ChoiceArrayField from hct_mis_api.apps.utils.models import TimeStampedUUIDModel @@ -11,7 +11,11 @@ class Report(TimeStampedUUIDModel): IN_PROGRESS = 1 COMPLETED = 2 FAILED = 3 - STATUSES = ((IN_PROGRESS, _("Processing")), (COMPLETED, _("Generated")), (FAILED, _("Failed"))) + STATUSES = ( + (IN_PROGRESS, _("Processing")), + (COMPLETED, _("Generated")), + (FAILED, _("Failed")), + ) INDIVIDUALS = 1 HOUSEHOLD_DEMOGRAPHICS = 2 @@ -32,9 +36,13 @@ class Report(TimeStampedUUIDModel): (INDIVIDUALS_AND_PAYMENT, _("Individuals & Payment")), ) - business_area = models.ForeignKey("core.BusinessArea", related_name="reports", on_delete=models.CASCADE) + business_area = models.ForeignKey( + "core.BusinessArea", related_name="reports", on_delete=models.CASCADE + ) file = models.FileField(blank=True, null=True) - created_by = models.ForeignKey("account.User", related_name="reports", on_delete=models.CASCADE) + created_by = models.ForeignKey( + "account.User", related_name="reports", on_delete=models.CASCADE + ) status = models.IntegerField(choices=STATUSES, default=IN_PROGRESS) report_type = models.IntegerField(choices=REPORT_TYPES) date_from = models.DateField() @@ -42,10 +50,18 @@ class Report(TimeStampedUUIDModel): number_of_records = models.IntegerField(blank=True, null=True) # any of these are optional and their requirements will depend on report type program = models.ForeignKey( - "program.Program", on_delete=models.CASCADE, blank=True, null=True, related_name="reports" + "program.Program", + on_delete=models.CASCADE, + blank=True, + null=True, + related_name="reports", + ) + admin_area = models.ManyToManyField( + "core.AdminArea", blank=True, related_name="reports" + ) + admin_area_new = models.ManyToManyField( + "geo.Area", blank=True, related_name="reports" ) - admin_area = models.ManyToManyField("core.AdminArea", blank=True, related_name="reports") - admin_area_new = models.ManyToManyField("geo.Area", blank=True, related_name="reports") def __str__(self): return f"[{self.report_type}] Report for [{self.business_area}]" @@ -58,7 +74,11 @@ class DashboardReport(TimeStampedUUIDModel): IN_PROGRESS = 1 COMPLETED = 2 FAILED = 3 - STATUSES = ((IN_PROGRESS, _("Processing")), (COMPLETED, _("Generated")), (FAILED, _("Failed"))) + STATUSES = ( + (IN_PROGRESS, _("Processing")), + (COMPLETED, _("Generated")), + (FAILED, _("Failed")), + ) TOTAL_TRANSFERRED_BY_COUNTRY = "TOTAL_TRANSFERRED_BY_COUNTRY" TOTAL_TRANSFERRED_BY_ADMIN_AREA = 
"TOTAL_TRANSFERRED_BY_ADMIN_AREA" @@ -79,20 +99,38 @@ class DashboardReport(TimeStampedUUIDModel): (PAYMENT_VERIFICATION, _("Payment verification")), ) - business_area = models.ForeignKey("core.BusinessArea", related_name="dashboard_reports", on_delete=models.CASCADE) + business_area = models.ForeignKey( + "core.BusinessArea", related_name="dashboard_reports", on_delete=models.CASCADE + ) file = models.FileField(blank=True, null=True) - created_by = models.ForeignKey("account.User", related_name="dashboard_reports", on_delete=models.CASCADE) + created_by = models.ForeignKey( + "account.User", related_name="dashboard_reports", on_delete=models.CASCADE + ) status = models.PositiveSmallIntegerField(choices=STATUSES, default=IN_PROGRESS) - report_type = ChoiceArrayField(models.CharField(choices=REPORT_TYPES, max_length=255)) + report_type = ChoiceArrayField( + models.CharField(choices=REPORT_TYPES, max_length=255) + ) # filters year = models.PositiveSmallIntegerField(default=datetime.now().year) program = models.ForeignKey( - "program.Program", on_delete=models.CASCADE, blank=True, null=True, related_name="dashboard_reports" + "program.Program", + on_delete=models.CASCADE, + blank=True, + null=True, + related_name="dashboard_reports", ) admin_area = models.ForeignKey( - "core.AdminArea", on_delete=models.CASCADE, blank=True, null=True, related_name="dashboard_reports" + "core.AdminArea", + on_delete=models.CASCADE, + blank=True, + null=True, + related_name="dashboard_reports", ) admin_area_new = models.ForeignKey( - "geo.Area", on_delete=models.CASCADE, blank=True, null=True, related_name="dashboard_reports" + "geo.Area", + on_delete=models.CASCADE, + blank=True, + null=True, + related_name="dashboard_reports", ) diff --git a/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py b/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py index a3c099498e..68d3a1fe0e 100644 --- a/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py +++ b/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py @@ -28,7 +28,9 @@ class LoadSanctionListXMLTask: - SANCTION_LIST_XML_URL = "https://scsanctions.un.org/resources/xml/en/consolidated.xml" + SANCTION_LIST_XML_URL = ( + "https://scsanctions.un.org/resources/xml/en/consolidated.xml" + ) INDIVIDUAL_TAG_PATH = "INDIVIDUALS/INDIVIDUAL" @@ -67,11 +69,16 @@ def _get_text_from_path(individual_tag: ET.Element, path: str) -> str: return tag.text @staticmethod - def _get_designation(individual_tag: ET.Element, *args, **kwargs) -> Union[str, None]: + def _get_designation( + individual_tag: ET.Element, *args, **kwargs + ) -> Union[str, None]: designation_tag_name = "DESIGNATION" designation_tag = individual_tag.find(designation_tag_name) if isinstance(designation_tag, ET.Element): - designations = [value_tag.text for value_tag in individual_tag.find(designation_tag_name)] + designations = [ + value_tag.text + for value_tag in individual_tag.find(designation_tag_name) + ] return " ".join(designations) return "" @@ -81,7 +88,7 @@ def _get_date_of_births( individual: SanctionListIndividual, *args, **kwargs, - ) -> Set[SanctionListIndividualDateOfBirth]: + ) -> set[SanctionListIndividualDateOfBirth]: date_of_birth_tags = individual_tag.findall("INDIVIDUAL_DATE_OF_BIRTH") dates_of_birth = set() for date_of_birth_tag in date_of_birth_tags: @@ -103,10 +110,14 @@ def _get_date_of_births( elif isinstance(note_tag, ET.Element) and note_tag.text: value = note_tag.text try: - parsed_date = dateutil.parser.parse(value, default=default_datetime) + parsed_date = 
dateutil.parser.parse( + value, default=default_datetime + ) dates_of_birth.add( SanctionListIndividualDateOfBirth( - individual=self._get_individual_from_db_or_file(individual), + individual=self._get_individual_from_db_or_file( + individual + ), date=parsed_date.date(), ) ) @@ -132,7 +143,7 @@ def _get_alias_names( individual: SanctionListIndividual, *args, **kwargs, - ) -> Set[SanctionListIndividualAliasName]: + ) -> set[SanctionListIndividualAliasName]: path = "INDIVIDUAL_ALIAS" alias_names_tags = individual_tag.findall(path) @@ -140,10 +151,17 @@ def _get_alias_names( for tag in alias_names_tags: quality_tag = tag.find("QUALITY") alias_name_tag = tag.find("ALIAS_NAME") - is_valid_quality_tag = isinstance(quality_tag, ET.Element) and quality_tag.text - is_valid_name_tag = isinstance(alias_name_tag, ET.Element) and alias_name_tag.text + is_valid_quality_tag = ( + isinstance(quality_tag, ET.Element) and quality_tag.text + ) + is_valid_name_tag = ( + isinstance(alias_name_tag, ET.Element) and alias_name_tag.text + ) if is_valid_quality_tag and is_valid_name_tag: - if quality_tag.text.lower() in ("good", "a.k.a") and alias_name_tag.text: + if ( + quality_tag.text.lower() in ("good", "a.k.a") + and alias_name_tag.text + ): aliases.add( SanctionListIndividualAliasName( individual=self._get_individual_from_db_or_file(individual), @@ -154,7 +172,9 @@ def _get_alias_names( return aliases @staticmethod - def _get_country_field(individual_tag: ET.Element, path: str, *args, **kwargs) -> Union[str, None, Set]: + def _get_country_field( + individual_tag: ET.Element, path: str, *args, **kwargs + ) -> Union[str, None, set]: tags = individual_tag.findall(path) countries = set() @@ -176,18 +196,18 @@ def _get_countries( individual: SanctionListIndividual, *args, **kwargs, - ) -> Set[SanctionListIndividualCountries]: + ) -> set[SanctionListIndividualCountries]: path = "INDIVIDUAL_ADDRESS/COUNTRY" result = self._get_country_field(individual_tag, path) if result: - return set( + return { SanctionListIndividualCountries( individual=self._get_individual_from_db_or_file(individual), country=alpha2, country_new=geo_models.Country.objects.get(iso_code2=alpha2), ) for alpha2 in result - ) + } return set() def _get_country_of_birth(self, individual_tag: ET.Element, *args, **kwargs) -> str: @@ -204,18 +224,18 @@ def _get_nationalities( individual: SanctionListIndividual, *args, **kwargs, - ) -> Set[SanctionListIndividualNationalities]: + ) -> set[SanctionListIndividualNationalities]: path = "NATIONALITY/VALUE" result = self._get_country_field(individual_tag, path) if result: - return set( + return { SanctionListIndividualNationalities( individual=self._get_individual_from_db_or_file(individual), nationality=alpha2, nationality_new=geo_models.Country.objects.get(iso_code2=alpha2), ) for alpha2 in result - ) + } return set() def _get_documents( @@ -224,7 +244,7 @@ def _get_documents( individual: SanctionListIndividual, *args, **kwargs, - ) -> Set[SanctionListIndividualDocument]: + ) -> set[SanctionListIndividualDocument]: document_tags = individual_tag.findall("INDIVIDUAL_DOCUMENT") documents = set() @@ -250,7 +270,9 @@ def _get_documents( "note", document_tag.find("NOTE"), ) - if isinstance(document_number_tag, ET.Element) and isinstance(type_of_document_tag, ET.Element): + if isinstance(document_number_tag, ET.Element) and isinstance( + type_of_document_tag, ET.Element + ): document = SanctionListIndividualDocument( individual=self._get_individual_from_db_or_file(individual), 
type_of_document=type_of_document_tag.text, @@ -263,7 +285,7 @@ def _get_documents( return documents - def _get_individual_data(self, individual_tag: ET.Element) -> Dict: + def _get_individual_data(self, individual_tag: ET.Element) -> dict: individual_data_dict = { "individual": SanctionListIndividual(), "documents": None, @@ -279,9 +301,14 @@ def _get_individual_data(self, individual_tag: ET.Element) -> Dict: value = path_or_func(individual_tag, individual) else: raw_value = self._get_text_from_path(individual_tag, path_or_func) - value = self._cast_field_value_to_correct_type(SanctionListIndividual, field_name, raw_value) + value = self._cast_field_value_to_correct_type( + SanctionListIndividual, field_name, raw_value + ) - if hasattr(individual, field_name) and field_name not in individual_data_dict.keys(): + if ( + hasattr(individual, field_name) + and field_name not in individual_data_dict.keys() + ): setattr(individual, field_name, value) elif field_name in individual_data_dict.keys(): individual_data_dict[field_name] = value @@ -289,7 +316,7 @@ def _get_individual_data(self, individual_tag: ET.Element) -> Dict: return individual_data_dict @cached_property - def _get_individual_fields(self) -> List[str]: + def _get_individual_fields(self) -> list[str]: excluded_fields = { "id", "history", @@ -303,14 +330,20 @@ def _get_individual_fields(self) -> List[str]: # "country_of_birth", } all_fields = SanctionListIndividual._meta.get_fields(include_parents=False) - return [field.name for field in all_fields if field.name not in excluded_fields and field.concrete is True] + return [ + field.name + for field in all_fields + if field.name not in excluded_fields and field.concrete is True + ] @staticmethod def _get_individual_from_db_or_file( individual: SanctionListIndividual, ) -> SanctionListIndividual: try: - return SanctionListIndividual.all_objects.get(reference_number=individual.reference_number) + return SanctionListIndividual.all_objects.get( + reference_number=individual.reference_number + ) except ObjectDoesNotExist: return individual @@ -318,45 +351,63 @@ def _get_individual_from_db_or_file( def _get_all_individuals_from_db(self) -> QuerySet: return SanctionListIndividual.all_objects.defer("documents") - def _get_existing_individuals(self, individuals_reference_numbers: Set[str]) -> QuerySet: - return self._get_all_individuals_from_db.filter(reference_number__in=individuals_reference_numbers) + def _get_existing_individuals( + self, individuals_reference_numbers: set[str] + ) -> QuerySet: + return self._get_all_individuals_from_db.filter( + reference_number__in=individuals_reference_numbers + ) def _get_individuals_to_create( self, individuals_from_file: Iterable[SanctionListIndividual] - ) -> Set[SanctionListIndividual]: - individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) + ) -> set[SanctionListIndividual]: + individuals_reference_numbers = self._get_reference_numbers_list( + individuals_from_file + ) return { individual for individual in individuals_from_file if individual.reference_number - not in self._get_existing_individuals(individuals_reference_numbers).values_list( - "reference_number", flat=True - ) + not in self._get_existing_individuals( + individuals_reference_numbers + ).values_list("reference_number", flat=True) } def _get_individuals_to_update( self, individuals_from_file: Iterable[SanctionListIndividual] - ) -> Set[SanctionListIndividual]: + ) -> set[SanctionListIndividual]: individuals_to_update = set() - 
individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) + individuals_reference_numbers = self._get_reference_numbers_list( + individuals_from_file + ) for individual in individuals_from_file: - new_individual_data_dict = model_to_dict(individual, fields=self._get_individual_fields) + new_individual_data_dict = model_to_dict( + individual, fields=self._get_individual_fields + ) old_individual = ( self._get_existing_individuals(individuals_reference_numbers) .filter(reference_number=new_individual_data_dict["reference_number"]) .first() ) if old_individual: - old_individual_data_dict = model_to_dict(old_individual, fields=self._get_individual_fields) + old_individual_data_dict = model_to_dict( + old_individual, fields=self._get_individual_fields + ) if new_individual_data_dict != old_individual_data_dict: - obj = SanctionListIndividual.all_objects.get(reference_number=individual.reference_number) + obj = SanctionListIndividual.all_objects.get( + reference_number=individual.reference_number + ) individual.id = obj.id individuals_to_update.add(individual) return individuals_to_update - def _get_individuals_to_deactivate(self, individuals_from_file: Iterable[SanctionListIndividual]) -> List[str]: - individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) + def _get_individuals_to_deactivate( + self, individuals_from_file: Iterable[SanctionListIndividual] + ) -> list[str]: + individuals_reference_numbers = self._get_reference_numbers_list( + individuals_from_file + ) ids = self._get_all_individuals_from_db.difference( self._get_existing_individuals(individuals_reference_numbers) ).values_list("id", flat=True) @@ -366,7 +417,7 @@ def _get_individuals_to_deactivate(self, individuals_from_file: Iterable[Sanctio @staticmethod def _get_reference_numbers_list( individuals_from_file: Iterable[SanctionListIndividual], - ) -> Set[str]: + ) -> set[str]: return {i.reference_number for i in individuals_from_file} @staticmethod @@ -454,7 +505,9 @@ def execute(self): self._get_individual_fields, 1000, ) - individuals_ids_to_delete = self._get_individuals_to_deactivate(individuals_from_file) + individuals_ids_to_delete = self._get_individuals_to_deactivate( + individuals_from_file + ) SanctionListIndividual.objects.filter(id__in=individuals_ids_to_delete).delete() # SanctionListIndividualDocument @@ -472,7 +525,9 @@ def execute(self): note=single_doc.note, ) if created is True: - individuals_to_check_against_sanction_list.append(doc_obj.individual) + individuals_to_check_against_sanction_list.append( + doc_obj.individual + ) # SanctionListIndividualCountries SanctionListIndividualCountries.objects.all().delete() @@ -483,7 +538,9 @@ def execute(self): SanctionListIndividualNationalities.objects.all().delete() if nationalities_from_file: - SanctionListIndividualNationalities.objects.bulk_create(nationalities_from_file) + SanctionListIndividualNationalities.objects.bulk_create( + nationalities_from_file + ) # SanctionListIndividualAliasName SanctionListIndividualAliasName.objects.all().delete() @@ -493,15 +550,22 @@ def execute(self): # SanctionListIndividualDateOfBirth if dob_from_file: for single_dob in dob_from_file: - dob_obj, created = SanctionListIndividualDateOfBirth.objects.get_or_create( + ( + dob_obj, + created, + ) = SanctionListIndividualDateOfBirth.objects.get_or_create( individual=single_dob.individual, date=single_dob.date, ) if created is True: - individuals_to_check_against_sanction_list.append(dob_obj.individual) + 
individuals_to_check_against_sanction_list.append( + dob_obj.individual + ) individuals_to_check_against_sanction_list.extend(individuals_to_create) individuals_to_check_against_sanction_list.extend(individuals_to_update) if individuals_to_check_against_sanction_list: - CheckAgainstSanctionListPreMergeTask.execute(individuals_to_check_against_sanction_list) + CheckAgainstSanctionListPreMergeTask.execute( + individuals_to_check_against_sanction_list + ) diff --git a/backend/hct_mis_api/apps/steficon/admin.py b/backend/hct_mis_api/apps/steficon/admin.py index 4cb7551a61..ef345bc3cd 100644 --- a/backend/hct_mis_api/apps/steficon/admin.py +++ b/backend/hct_mis_api/apps/steficon/admin.py @@ -57,7 +57,10 @@ def __init__(self, model, admin_site, attrs=None, choices=(), using=None): def get_url(self): model = self.model - return reverse(self.url_name % (self.admin_site.name, model._meta.app_label, model._meta.model_name)) + return reverse( + self.url_name + % (self.admin_site.name, model._meta.app_label, model._meta.model_name) + ) def get_context(self, name, value, attrs): context = {} @@ -80,7 +83,9 @@ def get_context(self, name, value, attrs): def media(self): extra = "" if settings.DEBUG else ".min" i18n_name = SELECT2_TRANSLATIONS.get(get_language()) - i18n_file = ("admin/js/vendor/select2/i18n/%s.js" % i18n_name,) if i18n_name else () + i18n_file = ( + ("admin/js/vendor/select2/i18n/%s.js" % i18n_name,) if i18n_name else () + ) return forms.Media( js=( "admin/js/vendor/jquery/jquery%s.js" % extra, @@ -129,7 +134,9 @@ def test(self, request, pk): title = f"Test result for '{rule}' using TargetPopulation '{tp}'" elif selection == "optContentType": ct: ContentType = form.cleaned_data["content_type"] - filters = json.loads(form.cleaned_data.get("content_type_filters") or "{}") + filters = json.loads( + form.cleaned_data.get("content_type_filters") or "{}" + ) qs = ct.model_class().objects.filter(**filters) data = qs.all() title = f"Test result for '{rule}' using ContentType '{ct}'" @@ -154,29 +161,39 @@ def test(self, request, pk): else: row["result"] = rule.execute(values) except Exception as e: - row["error"] = "%s: %s" % (e.__class__.__name__, str(e)) + row["error"] = f"{e.__class__.__name__}: {str(e)}" row["success"] = False results.append(row) context["results"] = results else: context["form"] = form else: - context["form"] = RuleTestForm(initial={"raw_data": '{"a": 1, "b":2}', "opt": "optFile"}) + context["form"] = RuleTestForm( + initial={"raw_data": '{"a": 1, "b":2}', "opt": "optFile"} + ) if "form" in context: from hct_mis_api.apps.targeting.models import TargetPopulation - context["form"].fields["target_population"].widget = AutocompleteWidget(TargetPopulation, self.admin_site) - context["form"].fields["content_type"].widget = AutocompleteWidget(ContentType, self.admin_site) + context["form"].fields["target_population"].widget = AutocompleteWidget( + TargetPopulation, self.admin_site + ) + context["form"].fields["content_type"].widget = AutocompleteWidget( + ContentType, self.admin_site + ) return TemplateResponse(request, "admin/steficon/rule/test.html", context) class RuleResource(ModelResource): created_by = fields.Field( - column_name="created_by", attribute="created_by", widget=ForeignKeyWidget(User, "username") + column_name="created_by", + attribute="created_by", + widget=ForeignKeyWidget(User, "username"), ) updated_by = fields.Field( - column_name="updated_by", attribute="created_by", widget=ForeignKeyWidget(User, "username") + column_name="updated_by", + 
attribute="created_by", + widget=ForeignKeyWidget(User, "username"), ) class Meta: @@ -196,8 +213,19 @@ class Meta: @register(Rule) -class RuleAdmin(ExtraUrlMixin, ImportExportMixin, TestRuleMixin, LinkedObjectsMixin, ModelAdmin): - list_display = ("name", "version", "language", "enabled", "deprecated", "created_by", "updated_by", "stable") +class RuleAdmin( + ExtraUrlMixin, ImportExportMixin, TestRuleMixin, LinkedObjectsMixin, ModelAdmin +): + list_display = ( + "name", + "version", + "language", + "enabled", + "deprecated", + "created_by", + "updated_by", + "stable", + ) list_filter = ("language", "enabled", "deprecated") search_fields = ("name",) form = RuleForm @@ -309,7 +337,9 @@ def process_file(self, request, pk): csv_config = self._get_csv_config(form) f = request.FILES["file"] input = f.read().decode("utf-8") - data = csv.DictReader(StringIO(input), fieldnames=None, **csv_config) + data = csv.DictReader( + StringIO(input), fieldnames=None, **csv_config + ) context["fields"] = data.fieldnames for attr in form.cleaned_data["results"]: context["fields"].append(labelize(attr)) @@ -317,9 +347,13 @@ def process_file(self, request, pk): results = [] for entry in data: try: - result = rule.execute(entry, only_enabled=False, only_release=False) + result = rule.execute( + entry, only_enabled=False, only_release=False + ) for attr in form.cleaned_data["results"]: - entry[labelize(attr)] = getattr(result, attr, "<ATTR NOT FOUND>") + entry[labelize(attr)] = getattr( + result, attr, "<ATTR NOT FOUND>" + ) except Exception as e: entry[info_col] = str(e) results.append(entry) @@ -349,11 +383,14 @@ def process_file(self, request, pk): response = HttpResponse( content_type="text/csv", headers={ - "Content-Disposition": 'attachment; filename="%s"' % form.cleaned_data["filename"] + "Content-Disposition": 'attachment; filename="%s"' + % form.cleaned_data["filename"] }, ) - writer = csv.DictWriter(response, fieldnames=fields, **csv_config) + writer = csv.DictWriter( + response, fieldnames=fields, **csv_config + ) writer.writeheader() writer.writerows(data) return response @@ -364,14 +401,21 @@ def process_file(self, request, pk): else: context["form"] = RuleFileProcessForm(initial={"results": "value"}) - return TemplateResponse(request, "admin/steficon/rule/file_process.html", context) + return TemplateResponse( + request, "admin/steficon/rule/file_process.html", context + ) @button(visible=lambda o, r: "/changelog/" not in r.path) def changelog(self, request, pk): - context = self.get_common_context(request, pk, title="Changelog", state_opts=RuleCommit._meta) + context = self.get_common_context( + request, pk, title="Changelog", state_opts=RuleCommit._meta + ) return TemplateResponse(request, "admin/steficon/rule/changelog.html", context) - @button(urls=[r"^aaa/(?P<pk>.*)/(?P<state>.*)/$", r"^bbb/(?P<pk>.*)/$"], visible=lambda o, r: "/change/" in r.path) + @button( + urls=[r"^aaa/(?P<pk>.*)/(?P<state>.*)/$", r"^bbb/(?P<pk>.*)/$"], + visible=lambda o, r: "/change/" in r.path, + ) def revert(self, request, pk, state=None): try: context = self.get_common_context( @@ -383,7 +427,9 @@ def revert(self, request, pk, state=None): state = self.object.history.get(pk=state) if request.method == "GET": context["state"] = state - return TemplateResponse(request, "admin/steficon/rule/revert.html", context) + return TemplateResponse( + request, "admin/steficon/rule/revert.html", context + ) else: with atomic(): if "_restore" in request.POST: @@ -419,7 +465,8 @@ def diff(self, request, pk): context["state"] = state 
context["title"] = ( - f"Change #{state.version} on " f"{state.timestamp.strftime('%d, %b %Y at %H:%M')} by {state.updated_by}" + f"Change #{state.version} on " + f"{state.timestamp.strftime('%d, %b %Y at %H:%M')} by {state.updated_by}" ) return TemplateResponse(request, "admin/steficon/rule/diff.html", context) except Exception as e: @@ -444,20 +491,41 @@ def save_model(self, request, obj, form, change): class RuleCommitResource(ModelResource): - rule = fields.Field(column_name="rule", attribute="rule", widget=ForeignKeyWidget(Rule, "name")) + rule = fields.Field( + column_name="rule", attribute="rule", widget=ForeignKeyWidget(Rule, "name") + ) updated_by = fields.Field( - column_name="updated_by", attribute="created_by", widget=ForeignKeyWidget(User, "username") + column_name="updated_by", + attribute="created_by", + widget=ForeignKeyWidget(User, "username"), ) class Meta: model = RuleCommit - fields = ("timestamp", "rule", "version", "updated_by", "affected_fields", "is_release") + fields = ( + "timestamp", + "rule", + "version", + "updated_by", + "affected_fields", + "is_release", + ) import_id_fields = ("rule", "version") @register(RuleCommit) -class RuleCommitAdmin(ExtraUrlMixin, ImportExportMixin, LinkedObjectsMixin, TestRuleMixin, ModelAdmin): - list_display = ("timestamp", "rule", "version", "updated_by", "is_release", "enabled", "deprecated") +class RuleCommitAdmin( + ExtraUrlMixin, ImportExportMixin, LinkedObjectsMixin, TestRuleMixin, ModelAdmin +): + list_display = ( + "timestamp", + "rule", + "version", + "updated_by", + "is_release", + "enabled", + "deprecated", + ) list_filter = (("rule", AutoCompleteFilter), "is_release", "enabled", "deprecated") search_fields = ("name",) readonly_fields = ("updated_by", "rule", "affected_fields", "version") diff --git a/backend/hct_mis_api/apps/steficon/models.py b/backend/hct_mis_api/apps/steficon/models.py index 6621c3bfa4..bae9f0bfb3 100644 --- a/backend/hct_mis_api/apps/steficon/models.py +++ b/backend/hct_mis_api/apps/steficon/models.py @@ -1,8 +1,8 @@ from django.conf import settings -from django.contrib.postgres.fields import ArrayField, CICharField, JSONField +from django.contrib.postgres.fields import ArrayField, CICharField from django.core.validators import ProhibitNullCharactersValidator from django.db import models -from django.db.models import QuerySet +from django.db.models import JSONField, QuerySet from django.db.transaction import atomic from django.forms import model_to_dict from django.utils.functional import cached_property @@ -30,13 +30,19 @@ class Rule(models.Model): name = CICharField( max_length=100, unique=True, - validators=[ProhibitNullCharactersValidator(), StartEndSpaceValidator, DoubleSpaceValidator], + validators=[ + ProhibitNullCharactersValidator(), + StartEndSpaceValidator, + DoubleSpaceValidator, + ], ) definition = models.TextField(blank=True, default="result.value=0") description = models.TextField(blank=True, null=True) enabled = models.BooleanField(default=False) deprecated = models.BooleanField(default=False) - language = models.CharField(max_length=10, default=LANGUAGES[0][0], choices=LANGUAGES) + language = models.CharField( + max_length=10, default=LANGUAGES[0][0], choices=LANGUAGES + ) security = models.IntegerField( choices=( (SAFETY_NONE, "Low"), @@ -45,8 +51,12 @@ class Rule(models.Model): ), default=SAFETY_STANDARD, ) - created_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT) - updated_by = models.ForeignKey(settings.AUTH_USER_MODEL, 
related_name="+", null=True, on_delete=models.PROTECT) + created_by = models.ForeignKey( + settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT + ) + updated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT + ) created_at = models.DateTimeField(auto_now_add=True, db_index=True) updated_at = models.DateTimeField(auto_now=True, db_index=True) @@ -90,7 +100,9 @@ def get_changes(self): diff = set(data1.items()).symmetric_difference(data2.items()) return data1, list(dict(diff).keys()) - def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + def save( + self, force_insert=False, force_update=False, using=None, update_fields=None + ): if "individual_data_needed" not in self.flags: self.flags["individual_data_needed"] = False with atomic(): @@ -110,9 +122,13 @@ def commit(self, is_release=False, force=False): "affected_fields": changes, } if changes: - release = RuleCommit.objects.create(rule=self, version=self.version, **values) + release = RuleCommit.objects.create( + rule=self, version=self.version, **values + ) elif force: - release, __ = RuleCommit.objects.update_or_create(rule=self, version=self.version, defaults=values) + release, __ = RuleCommit.objects.update_or_create( + rule=self, version=self.version, defaults=values + ) if is_release: self.history.exclude(pk=release.pk).update(deprecated=True) return release @@ -180,13 +196,19 @@ class RuleCommit(models.Model): timestamp = models.DateTimeField(auto_now=True) version = models.IntegerField() - rule = models.ForeignKey(Rule, null=True, related_name="history", on_delete=models.SET_NULL) - updated_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT) + rule = models.ForeignKey( + Rule, null=True, related_name="history", on_delete=models.SET_NULL + ) + updated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT + ) definition = models.TextField(blank=True, default="result.value=0") is_release = models.BooleanField(default=False) enabled = models.BooleanField(default=False) deprecated = models.BooleanField(default=False) - language = models.CharField(max_length=10, default=Rule.LANGUAGES[0][0], choices=Rule.LANGUAGES) + language = models.CharField( + max_length=10, default=Rule.LANGUAGES[0][0], choices=Rule.LANGUAGES + ) affected_fields = ArrayField(models.CharField(max_length=100)) before = JSONField(help_text="The record before change", editable=False) diff --git a/backend/hct_mis_api/apps/steficon/result.py b/backend/hct_mis_api/apps/steficon/result.py index 552c066382..69fef946a1 100644 --- a/backend/hct_mis_api/apps/steficon/result.py +++ b/backend/hct_mis_api/apps/steficon/result.py @@ -11,7 +11,7 @@ def __str__(self): class Score(Result): def __init__(self): - super(Score, self).__init__() + super().__init__() self.extra = {} def __repr__(self): diff --git a/backend/hct_mis_api/apps/steficon/templatetags/engine.py b/backend/hct_mis_api/apps/steficon/templatetags/engine.py index 32c3c5a3f3..ee066d1cda 100644 --- a/backend/hct_mis_api/apps/steficon/templatetags/engine.py +++ b/backend/hct_mis_api/apps/steficon/templatetags/engine.py @@ -15,15 +15,17 @@ class HtmlDiff(difflib.HtmlDiff): def _format_line(self, side, flag, linenum, text): try: linenum = "%d" % linenum - id = ' id="%s%s"' % (self._prefix[side], linenum) + id = f' id="{self._prefix[side]}{linenum}"' except TypeError: id = "" text = text.replace("&", 
"&").replace(">", ">").replace("<", "<") text = text.replace(" ", " ").rstrip() - return '<td class="diff_header lineno"%s>%s</td><td class="code" nowrap="nowrap">%s</td>' % (id, linenum, text) + return f'<td class="diff_header lineno"{id}>{linenum}</td><td class="code" nowrap="nowrap">{text}</td>' - def make_table(self, fromlines, tolines, fromdesc="", todesc="", context=False, numlines=5): + def make_table( + self, fromlines, tolines, fromdesc="", todesc="", context=False, numlines=5 + ): """Returns HTML table of side by side comparison with change highlights Arguments: @@ -82,9 +84,12 @@ def make_table(self, fromlines, tolines, fromdesc="", todesc="", context=False, if i > 0: s.append(" </tbody> \n <tbody>\n") else: - s.append(fmt % (next_id[i], next_href[i], fromlist[i], next_href[i], tolist[i])) + s.append( + fmt + % (next_id[i], next_href[i], fromlist[i], next_href[i], tolist[i]) + ) if fromdesc or todesc: - header_row = "<thead><tr>%s%s%s%s</tr></thead>" % ( + header_row = "<thead><tr>{}{}{}{}</tr></thead>".format( '<th class="diff_next"><br /></th>', '<th colspan="2" class="diff_header">%s</th>' % fromdesc, '<th class="diff_next"><br /></th>', @@ -93,7 +98,9 @@ def make_table(self, fromlines, tolines, fromdesc="", todesc="", context=False, else: header_row = "" - table = self._table_template % dict(data_rows="".join(s), header_row=header_row, prefix=self._prefix[1]) + table = self._table_template % dict( + data_rows="".join(s), header_row=header_row, prefix=self._prefix[1] + ) return ( table.replace("\0+", '<span class="diff_add">') @@ -116,7 +123,9 @@ def define(val=None): @register.filter def adults(hh): - return hh.members.filter(age__gte=18, age__lte=65, work__in=["fulltime", "seasonal", "parttime"]).count() + return hh.members.filter( + age__gte=18, age__lte=65, work__in=["fulltime", "seasonal", "parttime"] + ).count() @register.filter @@ -160,4 +169,8 @@ def diff(commit, panels="before,after"): right_label = f"Version current ({rule.version})" right_panel = rule.definition.split("\n") - return mark_safe(HtmlDiff(wrapcolumn=80).make_table(left_panel, right_panel, left_label, right_label)) + return mark_safe( + HtmlDiff(wrapcolumn=80).make_table( + left_panel, right_panel, left_label, right_label + ) + ) diff --git a/backend/hct_mis_api/apps/targeting/models.py b/backend/hct_mis_api/apps/targeting/models.py index dfe0c6ce62..e5ac4873cd 100644 --- a/backend/hct_mis_api/apps/targeting/models.py +++ b/backend/hct_mis_api/apps/targeting/models.py @@ -2,7 +2,7 @@ import logging from django.conf import settings -from django.contrib.postgres.fields import CICharField, IntegerRangeField, JSONField +from django.contrib.postgres.fields import CICharField, IntegerRangeField from django.contrib.postgres.validators import ( RangeMaxValueValidator, RangeMinValueValidator, @@ -14,9 +14,9 @@ ProhibitNullCharactersValidator, ) from django.db import models -from django.db.models import Case, Count, Q, Value, When +from django.db.models import Case, Count, JSONField, Q, Value, When from django.utils.text import Truncator -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from dateutil.relativedelta import relativedelta from model_utils import Choices @@ -70,7 +70,10 @@ def get_integer_range(min_range=None, max_range=None): return IntegerRangeField( default=get_serialized_range, blank=True, - validators=[RangeMinValueValidator(min_range), RangeMaxValueValidator(max_range)], + validators=[ + RangeMinValueValidator(min_range), + 
RangeMaxValueValidator(max_range), + ], ) @@ -81,8 +84,14 @@ def get_queryset(self): .get_queryset() .annotate( number_of_households=Case( - When(status=TargetPopulation.STATUS_LOCKED, then="candidate_list_total_households"), - When(status=TargetPopulation.STATUS_READY_FOR_CASH_ASSIST, then="final_list_total_households"), + When( + status=TargetPopulation.STATUS_LOCKED, + then="candidate_list_total_households", + ), + When( + status=TargetPopulation.STATUS_READY_FOR_CASH_ASSIST, + then="final_list_total_households", + ), default=Value(0), ) ) @@ -184,8 +193,12 @@ class TargetPopulation(SoftDeletableModel, TimeStampedUUIDModel, ConcurrencyMode null=True, blank=True, ) - business_area = models.ForeignKey("core.BusinessArea", null=True, on_delete=models.CASCADE) - status = models.CharField(max_length=_MAX_LEN, choices=STATUS_CHOICES, default=STATUS_DRAFT, db_index=True) + business_area = models.ForeignKey( + "core.BusinessArea", null=True, on_delete=models.CASCADE + ) + status = models.CharField( + max_length=_MAX_LEN, choices=STATUS_CHOICES, default=STATUS_DRAFT, db_index=True + ) households = models.ManyToManyField( "household.Household", related_name="target_populations", @@ -243,30 +256,48 @@ class TargetPopulation(SoftDeletableModel, TimeStampedUUIDModel, ConcurrencyMode db_index=True, ) steficon_rule = models.ForeignKey( - RuleCommit, null=True, on_delete=models.PROTECT, related_name="target_populations", blank=True + RuleCommit, + null=True, + on_delete=models.PROTECT, + related_name="target_populations", + blank=True, ) steficon_applied_date = models.DateTimeField(blank=True, null=True) vulnerability_score_min = models.DecimalField( - null=True, decimal_places=3, max_digits=6, help_text="Written by a tool such as Corticon.", blank=True + null=True, + decimal_places=3, + max_digits=6, + help_text="Written by a tool such as Corticon.", + blank=True, ) vulnerability_score_max = models.DecimalField( - null=True, decimal_places=3, max_digits=6, help_text="Written by a tool such as Corticon.", blank=True + null=True, + decimal_places=3, + max_digits=6, + help_text="Written by a tool such as Corticon.", + blank=True, ) excluded_ids = models.TextField(blank=True) exclusion_reason = models.TextField(blank=True) @property def excluded_household_ids(self): - excluded_household_ids_array = map_unicef_ids_to_households_unicef_ids(self.excluded_ids) + excluded_household_ids_array = map_unicef_ids_to_households_unicef_ids( + self.excluded_ids + ) return excluded_household_ids_array @property def vulnerability_score_filtered_households(self): queryset = self.households if self.vulnerability_score_max is not None: - queryset = queryset.filter(selections__vulnerability_score__lte=self.vulnerability_score_max) + queryset = queryset.filter( + selections__vulnerability_score__lte=self.vulnerability_score_max + ) if self.vulnerability_score_min is not None: - queryset = queryset.filter(selections__vulnerability_score__gte=self.vulnerability_score_min) + queryset = queryset.filter( + selections__vulnerability_score__gte=self.vulnerability_score_min + ) queryset = queryset.filter(~Q(unicef_id__in=self.excluded_household_ids)) return queryset.distinct() @@ -275,9 +306,9 @@ def vulnerability_score_filtered_households(self): def candidate_list(self): if self.status != TargetPopulation.STATUS_DRAFT: return [] - return Household.objects.filter(self.candidate_list_targeting_criteria.get_query()).filter( - business_area=self.business_area - ) + return Household.objects.filter( + 
self.candidate_list_targeting_criteria.get_query() + ).filter(business_area=self.business_area) @property def final_list(self): @@ -294,14 +325,26 @@ def candidate_stats(self): if self.status == TargetPopulation.STATUS_DRAFT: households_ids = self.candidate_list.values_list("id") else: - households_ids = self.vulnerability_score_filtered_households.values_list("id") + households_ids = self.vulnerability_score_filtered_households.values_list( + "id" + ) delta18 = relativedelta(years=+18) date18ago = datetime.datetime.now() - delta18 - targeted_individuals = Individual.objects.filter(household__id__in=households_ids).aggregate( - child_male=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE)), - child_female=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE)), - adult_male=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE)), - adult_female=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE)), + targeted_individuals = Individual.objects.filter( + household__id__in=households_ids + ).aggregate( + child_male=Count( + "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE) + ), + child_female=Count( + "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE) + ), + adult_male=Count( + "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE) + ), + adult_female=Count( + "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE) + ), ) return { "child_male": targeted_individuals.get("child_male"), @@ -331,7 +374,9 @@ def final_stats(self): return None elif self.status == TargetPopulation.STATUS_LOCKED: households_ids = ( - self.vulnerability_score_filtered_households.filter(self.final_list_targeting_criteria.get_query()) + self.vulnerability_score_filtered_households.filter( + self.final_list_targeting_criteria.get_query() + ) .filter(business_area=self.business_area) .values_list("id") .distinct() @@ -341,11 +386,21 @@ def final_stats(self): delta18 = relativedelta(years=+18) date18ago = datetime.datetime.now() - delta18 - targeted_individuals = Individual.objects.filter(household__id__in=households_ids).aggregate( - child_male=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE)), - child_female=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE)), - adult_male=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE)), - adult_female=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE)), + targeted_individuals = Individual.objects.filter( + household__id__in=households_ids + ).aggregate( + child_male=Count( + "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE) + ), + child_female=Count( + "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE) + ), + adult_male=Count( + "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE) + ), + adult_female=Count( + "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE) + ), ) return { @@ -361,7 +416,9 @@ def allowed_steficon_rule(self): return None tp = ( TargetPopulation.objects.filter( - program=self.program, steficon_rule__isnull=False, status=TargetPopulation.STATUS_PROCESSING + program=self.program, + steficon_rule__isnull=False, + status=TargetPopulation.STATUS_PROCESSING, ) .order_by("-created_at") .first() @@ -375,7 +432,10 @@ def set_to_ready_for_cash_assist(self): self.sent_to_datahub = True def is_finalized(self): - return self.status in [self.STATUS_PROCESSING, 
self.STATUS_READY_FOR_CASH_ASSIST] + return self.status in [ + self.STATUS_PROCESSING, + self.STATUS_READY_FOR_CASH_ASSIST, + ] def is_locked(self): return self.status == self.STATUS_LOCKED @@ -407,7 +467,9 @@ class HouseholdSelection(TimeStampedUUIDModel): on_delete=models.CASCADE, related_name="selections", ) - target_population = models.ForeignKey("TargetPopulation", on_delete=models.CASCADE, related_name="selections") + target_population = models.ForeignKey( + "TargetPopulation", on_delete=models.CASCADE, related_name="selections" + ) vulnerability_score = models.DecimalField( blank=True, null=True, @@ -508,12 +570,16 @@ def get_criteria_string(self): if isinstance(self.individuals_filters_blocks, list) else self.individuals_filters_blocks.all() ) - individuals_filters_blocks_strings = [x.get_criteria_string() for x in individuals_filters_blocks] + individuals_filters_blocks_strings = [ + x.get_criteria_string() for x in individuals_filters_blocks + ] all_strings = [] if len(filters_strings): all_strings.append(f"H({' AND '.join(filters_strings).strip()})") if len(individuals_filters_blocks_strings): - all_strings.append(f"I({' AND '.join(individuals_filters_blocks_strings).strip()})") + all_strings.append( + f"I({' AND '.join(individuals_filters_blocks_strings).strip()})" + ) return " AND ".join(all_strings).strip() def get_query(self): @@ -580,7 +646,9 @@ def get_query(self): if self.target_only_hoh: # only filtering against heads of household individuals_query &= Q(heading_household__isnull=False) - households_id = Individual.objects.filter(individuals_query).values_list("household_id", flat=True) + households_id = Individual.objects.filter(individuals_query).values_list( + "household_id", flat=True + ) return Q(id__in=households_id) @@ -589,7 +657,9 @@ class TargetingIndividualRuleFilterBlock( TargetingIndividualRuleFilterBlockMixin, ): targeting_criteria_rule = models.ForeignKey( - "TargetingCriteriaRule", on_delete=models.CASCADE, related_name="individuals_filters_blocks" + "TargetingCriteriaRule", + on_delete=models.CASCADE, + related_name="individuals_filters_blocks", ) target_only_hoh = models.BooleanField(default=False) @@ -602,7 +672,12 @@ class TargetingCriteriaFilterMixin: "negative": False, "supported_types": ["INTEGER", "SELECT_ONE", "STRING", "BOOL"], }, - "NOT_EQUALS": {"arguments": 1, "lookup": "", "negative": True, "supported_types": ["INTEGER", "SELECT_ONE"]}, + "NOT_EQUALS": { + "arguments": 1, + "lookup": "", + "negative": True, + "supported_types": ["INTEGER", "SELECT_ONE"], + }, "CONTAINS": { "min_arguments": 1, "arguments": 1, @@ -610,7 +685,12 @@ class TargetingCriteriaFilterMixin: "negative": False, "supported_types": ["SELECT_MANY", "STRING"], }, - "NOT_CONTAINS": {"arguments": 1, "lookup": "__icontains", "negative": True, "supported_types": ["STRING"]}, + "NOT_CONTAINS": { + "arguments": 1, + "lookup": "__icontains", + "negative": True, + "supported_types": ["STRING"], + }, "RANGE": { "arguments": 2, "lookup": "__range", @@ -629,7 +709,12 @@ class TargetingCriteriaFilterMixin: "negative": False, "supported_types": ["INTEGER", "DECIMAL"], }, - "LESS_THAN": {"arguments": 1, "lookup": "__lte", "negative": False, "supported_types": ["INTEGER", "DECIMAL"]}, + "LESS_THAN": { + "arguments": 1, + "lookup": "__lte", + "negative": False, + "supported_types": ["INTEGER", "DECIMAL"], + }, } COMPARISON_CHOICES = Choices( @@ -666,17 +751,25 @@ def get_query_for_lookup( field_attr, ): select_many = get_attr_value("type", field_attr, None) == TYPE_SELECT_MANY - 
comparision_attribute = TargetingCriteriaRuleFilter.COMPARISION_ATTRIBUTES.get(self.comparision_method) + comparision_attribute = TargetingCriteriaRuleFilter.COMPARISION_ATTRIBUTES.get( + self.comparision_method + ) args_count = comparision_attribute.get("arguments") if self.arguments is None: - logger.error(f"{self.field_name} {self.comparision_method} filter query expect {args_count} " f"arguments") + logger.error( + f"{self.field_name} {self.comparision_method} filter query expect {args_count} " + f"arguments" + ) raise ValidationError( - f"{self.field_name} {self.comparision_method} filter query expect {args_count} " f"arguments" + f"{self.field_name} {self.comparision_method} filter query expect {args_count} " + f"arguments" ) args_input_count = len(self.arguments) if select_many: if args_input_count < 1: - logger.error(f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument") + logger.error( + f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument" + ) raise ValidationError( f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument" ) @@ -707,9 +800,13 @@ def get_query_for_lookup( def get_query_for_core_field(self): core_fields = self.get_core_fields() - core_field_attrs = [attr for attr in core_fields if attr.get("name") == self.field_name] + core_field_attrs = [ + attr for attr in core_fields if attr.get("name") == self.field_name + ] if len(core_field_attrs) != 1: - logger.error(f"There are no Core Field Attributes associated with this fieldName {self.field_name}") + logger.error( + f"There are no Core Field Attributes associated with this fieldName {self.field_name}" + ) raise ValidationError( f"There are no Core Field Attributes associated with this fieldName {self.field_name}" ) @@ -733,11 +830,15 @@ def get_query_for_core_field(self): def get_query_for_flex_field(self): flex_field_attr = FlexibleAttribute.objects.get(name=self.field_name) if not flex_field_attr: - logger.error(f"There are no Flex Field Attributes associated with this fieldName {self.field_name}") + logger.error( + f"There are no Flex Field Attributes associated with this fieldName {self.field_name}" + ) raise ValidationError( f"There are no Flex Field Attributes associated with this fieldName {self.field_name}" ) - lookup_prefix = self.get_lookup_prefix(_INDIVIDUAL if flex_field_attr.associated_with == 1 else _HOUSEHOLD) + lookup_prefix = self.get_lookup_prefix( + _INDIVIDUAL if flex_field_attr.associated_with == 1 else _HOUSEHOLD + ) lookup = f"{lookup_prefix}flex_fields__{flex_field_attr.name}" return self.get_query_for_lookup(lookup, flex_field_attr) @@ -781,7 +882,9 @@ def get_core_fields(self): ) -class TargetingIndividualBlockRuleFilter(TimeStampedUUIDModel, TargetingCriteriaFilterMixin): +class TargetingIndividualBlockRuleFilter( + TimeStampedUUIDModel, TargetingCriteriaFilterMixin +): """ This is one explicit filter like: :Age <> 10-20 diff --git a/backend/hct_mis_api/apps/utils/admin.py b/backend/hct_mis_api/apps/utils/admin.py index cf3b560e91..2e57172be7 100644 --- a/backend/hct_mis_api/apps/utils/admin.py +++ b/backend/hct_mis_api/apps/utils/admin.py @@ -1,7 +1,7 @@ from django.conf import settings from django.contrib import admin from django.contrib.admin import SimpleListFilter -from django.contrib.postgres.fields import JSONField +from django.db.models import JSONField from admin_extra_urls.decorators import button from admin_extra_urls.mixins import ExtraUrlMixin, _confirm_action @@ -65,7 +65,9 @@ def 
reset_sync_date_single(self, request, pk): ) -class HOPEModelAdminBase(SmartDisplayAllMixin, AdminActionPermMixin, JSONWidgetMixin, admin.ModelAdmin): +class HOPEModelAdminBase( + SmartDisplayAllMixin, AdminActionPermMixin, JSONWidgetMixin, admin.ModelAdmin +): list_per_page = 50 def get_fields(self, request, obj=None): diff --git a/backend/hct_mis_api/settings/__init__.py b/backend/hct_mis_api/settings/__init__.py index 1045bd5fa9..2d99e13fa8 100644 --- a/backend/hct_mis_api/settings/__init__.py +++ b/backend/hct_mis_api/settings/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - # import defaults from importlib import import_module diff --git a/backend/hct_mis_api/settings/base.py b/backend/hct_mis_api/settings/base.py index 80dd91a456..9d63d563ce 100644 --- a/backend/hct_mis_api/settings/base.py +++ b/backend/hct_mis_api/settings/base.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import logging import os import re @@ -195,7 +193,10 @@ os.path.join(PROJECT_ROOT, "apps", "core", "templates"), ], "OPTIONS": { - "loaders": ["django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader"], + "loaders": [ + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ], "context_processors": [ "django.contrib.auth.context_processors.auth", "django.template.context_processors.debug", @@ -283,13 +284,18 @@ # LOGIN_REDIRECT_URL = f'/api/{ADMIN_PANEL_URL}/' AUTH_PASSWORD_VALIDATORS = [ - {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, - {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", "OPTIONS": {"min_length": 12}}, + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" + }, + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + "OPTIONS": {"min_length": 12}, + }, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] -PASSWORD_RESET_TIMEOUT_DAYS = 31 +PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 31 ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = 7 @@ -313,20 +319,34 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): "version": 1, "disable_existing_loggers": False, "formatters": { - "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s"}, + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s" + }, "verbose": { "format": "[%(asctime)s][%(levelname)s][%(name)s] %(filename)s.%(funcName)s:%(lineno)d %(message)s", }, }, "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, "handlers": { - "default": {"level": LOG_LEVEL, "class": "logging.StreamHandler", "formatter": "standard"}, - "file": {"level": LOG_LEVEL, "class": "logging.FileHandler", "filename": "debug.log"}, + "default": { + "level": LOG_LEVEL, + "class": "logging.StreamHandler", + "formatter": "standard", + }, + "file": { + "level": LOG_LEVEL, + "class": "logging.FileHandler", + "filename": "debug.log", + }, }, "loggers": { "": {"handlers": ["default"], "level": "INFO", "propagate": True}, "console": {"handlers": ["default"], "level": "DEBUG", "propagate": True}, - "django.request": {"handlers": ["default"], "level": "ERROR", "propagate": False}, + "django.request": { + "handlers": ["default"], + "level": "ERROR", + "propagate": False, + }, "django.security.DisallowedHost": { # Skip "SuspiciousOperation: Invalid 
HTTP_HOST" e-mails. "handlers": ["default"], @@ -356,9 +376,13 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): REDIS_INSTANCE = os.getenv("REDIS_INSTANCE", "redis:6379") if "CACHE_URL" not in os.environ: if REDIS_INSTANCE: - os.environ["CACHE_URL"] = f"redis://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" + os.environ[ + "CACHE_URL" + ] = f"redis://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" else: - os.environ["CACHE_URL"] = f"dummycache://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" + os.environ[ + "CACHE_URL" + ] = f"dummycache://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" CACHES = { "default": env.cache(), @@ -440,15 +464,24 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): CONSTANCE_ADDITIONAL_FIELDS = { "percentages": ( "django.forms.fields.IntegerField", - {"widget": "django.forms.widgets.NumberInput", "validators": [MinValueValidator(0), MaxValueValidator(100)]}, + { + "widget": "django.forms.widgets.NumberInput", + "validators": [MinValueValidator(0), MaxValueValidator(100)], + }, ), "positive_integers": ( "django.forms.fields.IntegerField", - {"widget": "django.forms.widgets.NumberInput", "validators": [MinValueValidator(0)]}, + { + "widget": "django.forms.widgets.NumberInput", + "validators": [MinValueValidator(0)], + }, ), "positive_floats": ( "django.forms.fields.FloatField", - {"widget": "django.forms.widgets.NumberInput", "validators": [MinValueValidator(0)]}, + { + "widget": "django.forms.widgets.NumberInput", + "validators": [MinValueValidator(0)], + }, ), } @@ -469,7 +502,11 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): "If percentage of duplicates is higher or equal to this setting, deduplication is aborted", "percentages", ), - "CASHASSIST_DOAP_RECIPIENT": ("", "UNHCR email address where to send DOAP updates", str), + "CASHASSIST_DOAP_RECIPIENT": ( + "", + "UNHCR email address where to send DOAP updates", + str, + ), "KOBO_ADMIN_CREDENTIALS": ( "", "Kobo superuser credentislas in format user:password", @@ -501,7 +538,10 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): # RAPID PRO "RAPID_PRO_PROVIDER": ("tel", "Rapid pro messages provider (telegram/tel)"), # CASH ASSIST - "CASH_ASSIST_URL_PREFIX": ("", "Cash Assist base url used to generate url to cash assist"), + "CASH_ASSIST_URL_PREFIX": ( + "", + "Cash Assist base url used to generate url to cash assist", + ), "SEND_GRIEVANCES_NOTIFICATION": ( False, "Should send grievances notification", @@ -560,7 +600,8 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): from hct_mis_api import get_full_version sentry_logging = LoggingIntegration( - level=logging.INFO, event_level=logging.ERROR # Capture info and above as breadcrumbs # Send errors as events + level=logging.INFO, + event_level=logging.ERROR, # Capture info and above as breadcrumbs # Send errors as events ) sentry_sdk.init( @@ -660,7 +701,9 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): SMART_ADMIN_BOOKMARKS_PERMISSION = None SMART_ADMIN_PROFILE_LINK = True -SMART_ADMIN_ISROOT = lambda r, *a: r.user.is_superuser and r.headers.get("x-root-token") == env("ROOT_TOKEN") +SMART_ADMIN_ISROOT = lambda r, *a: r.user.is_superuser and r.headers.get( + "x-root-token" +) == env("ROOT_TOKEN") EXCHANGE_RATE_CACHE_EXPIRY = 1 * 60 * 60 * 24 diff --git a/backend/hct_mis_api/settings/dev.py b/backend/hct_mis_api/settings/dev.py index de34a994d8..f54236bd43 100644 --- a/backend/hct_mis_api/settings/dev.py +++ 
b/backend/hct_mis_api/settings/dev.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import os from .base import * # noqa: ignore=F403 @@ -34,4 +32,6 @@ "test": {"hosts": "elasticsearch_test:9200"}, } -EMAIL_BACKEND = os.getenv("EMAIL_BACKEND", "django.core.mail.backends.console.EmailBackend") +EMAIL_BACKEND = os.getenv( + "EMAIL_BACKEND", "django.core.mail.backends.console.EmailBackend" +) diff --git a/backend/hct_mis_api/settings/staging.py b/backend/hct_mis_api/settings/staging.py index 3097c25ece..92c153e12e 100644 --- a/backend/hct_mis_api/settings/staging.py +++ b/backend/hct_mis_api/settings/staging.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration @@ -22,14 +20,24 @@ AZURE_ACCOUNT_NAME = os.getenv("STORAGE_AZURE_ACCOUNT_NAME", "") AZURE_ACCOUNT_KEY = os.getenv("STORAGE_AZURE_ACCOUNT_KEY", "") -MEDIA_STORAGE_AZURE_ACCOUNT_NAME = os.getenv("MEDIA_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME) -MEDIA_STORAGE_AZURE_ACCOUNT_KEY = os.getenv("MEDIA_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY) -STATIC_STORAGE_AZURE_ACCOUNT_NAME = os.getenv("STATIC_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME) -STATIC_STORAGE_AZURE_ACCOUNT_KEY = os.getenv("STATIC_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY) +MEDIA_STORAGE_AZURE_ACCOUNT_NAME = os.getenv( + "MEDIA_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME +) +MEDIA_STORAGE_AZURE_ACCOUNT_KEY = os.getenv( + "MEDIA_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY +) +STATIC_STORAGE_AZURE_ACCOUNT_NAME = os.getenv( + "STATIC_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME +) +STATIC_STORAGE_AZURE_ACCOUNT_KEY = os.getenv( + "STATIC_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY +) AZURE_URL_EXPIRATION_SECS = 10800 -AZURE_STATIC_CUSTOM_DOMAIN = f"{STATIC_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" +AZURE_STATIC_CUSTOM_DOMAIN = ( + f"{STATIC_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" +) AZURE_MEDIA_CUSTOM_DOMAIN = f"{MEDIA_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" STATIC_URL = f"https://{AZURE_STATIC_CUSTOM_DOMAIN}/{STATIC_LOCATION}/" MEDIA_URL = f"https://{AZURE_MEDIA_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/" diff --git a/backend/hct_mis_api/settings/test.py b/backend/hct_mis_api/settings/test.py index bd15ecca2c..64b6a80f57 100644 --- a/backend/hct_mis_api/settings/test.py +++ b/backend/hct_mis_api/settings/test.py @@ -1,7 +1,6 @@ -from __future__ import absolute_import +import logging from .base import * # noqa: ignore=F403 -import logging # dev overrides DEBUG = True @@ -16,30 +15,55 @@ # other -CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache", "TIMEOUT": 1800}} +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "TIMEOUT": 1800, + } +} LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { - "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s"}, + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s" + }, "verbose": { "format": "[%(asctime)s][%(levelname)s][%(name)s] %(filename)s.%(funcName)s:%(lineno)d %(message)s", }, }, "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, - "handlers": {"default": {"level": LOG_LEVEL, "class": "logging.StreamHandler", "formatter": "standard"}}, + "handlers": { + "default": { + "level": LOG_LEVEL, + "class": "logging.StreamHandler", + "formatter": "standard", + } + }, "loggers": { "": {"handlers": 
["default"], "level": "DEBUG", "propagate": True}, - "registration_datahub.tasks.deduplicate": {"handlers": ["default"], "level": "INFO", "propagate": True}, + "registration_datahub.tasks.deduplicate": { + "handlers": ["default"], + "level": "INFO", + "propagate": True, + }, "sanction_list.tasks.check_against_sanction_list_pre_merge": { "handlers": ["default"], "level": "INFO", "propagate": True, }, "graphql": {"handlers": ["default"], "level": "CRITICAL", "propagate": True}, - "elasticsearch": {"handlers": ["default"], "level": "CRITICAL", "propagate": True}, - "elasticsearch-dsl-django": {"handlers": ["default"], "level": "CRITICAL", "propagate": True}, + "elasticsearch": { + "handlers": ["default"], + "level": "CRITICAL", + "propagate": True, + }, + "elasticsearch-dsl-django": { + "handlers": ["default"], + "level": "CRITICAL", + "propagate": True, + }, "hct_mis_api.apps.registration_datahub.tasks.deduplicate": { "handlers": ["default"], "level": "CRITICAL", diff --git a/backend/hct_mis_api/urls.py b/backend/hct_mis_api/urls.py index d1cb5c31d8..fad02994f3 100644 --- a/backend/hct_mis_api/urls.py +++ b/backend/hct_mis_api/urls.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from django.conf import settings from django.conf.urls.static import static from django.contrib import admin @@ -13,9 +11,9 @@ import hct_mis_api.apps.account.views import hct_mis_api.apps.payment.views -import hct_mis_api.apps.targeting.views import hct_mis_api.apps.registration_datahub.views import hct_mis_api.apps.sanction_list.views +import hct_mis_api.apps.targeting.views from hct_mis_api.apps.core.views import ( call_command_view, homepage, @@ -32,8 +30,14 @@ path("api/explorer/", include("explorer.urls")), path(f"api/{settings.ADMIN_PANEL_URL}/hijack/", include("hijack.urls")), path(f"api/{settings.ADMIN_PANEL_URL}/adminactions/", include("adminactions.urls")), - path(f"api/{settings.ADMIN_PANEL_URL}/advanced_filters/", include("advanced_filters.urls")), - path(f"api/{settings.ADMIN_PANEL_URL}/reports/", include("hct_mis_api.apps.power_query.urls")), + path( + f"api/{settings.ADMIN_PANEL_URL}/advanced_filters/", + include("advanced_filters.urls"), + ), + path( + f"api/{settings.ADMIN_PANEL_URL}/reports/", + include("hct_mis_api.apps.power_query.urls"), + ), path("", homepage), path("_health", homepage), path("api/_health", homepage), @@ -42,9 +46,13 @@ path("api/", include("social_django.urls", namespace="social")), path("api/logout", logout_view), path("api/sentry-debug/", trigger_error), - path("api/download-template", hct_mis_api.apps.registration_datahub.views.download_template), path( - "api/download-exported-users/<str:business_area_slug>", hct_mis_api.apps.account.views.download_exported_users + "api/download-template", + hct_mis_api.apps.registration_datahub.views.download_template, + ), + path( + "api/download-exported-users/<str:business_area_slug>", + hct_mis_api.apps.account.views.download_exported_users, ), path( "api/download-cash-plan-payment-verification/<str:verification_id>", diff --git a/deployment/kobo/files/enketo/create_config.py b/deployment/kobo/files/enketo/create_config.py index f1b928c006..f433fec8c9 100644 --- a/deployment/kobo/files/enketo/create_config.py +++ b/deployment/kobo/files/enketo/create_config.py @@ -2,29 +2,30 @@ import json import os - CURRENT_DIR_PATH = os.path.abspath(os.path.dirname(__file__)) -PROJECT_ROOT_PATH = os.path.abspath(os.path.join(CURRENT_DIR_PATH, '../..')) +PROJECT_ROOT_PATH = os.path.abspath(os.path.join(CURRENT_DIR_PATH, "../..")) def 
get_or_create_encryption_key(): - '''Automate the inconvenient task of generating and maintaining a consistent - encryption key.''' + """Automate the inconvenient task of generating and maintaining a consistent + encryption key.""" # Attempt to get the key from an environment variable. - encryption_key = os.environ.get('ENKETO_ENCRYPTION_KEY') + encryption_key = os.environ.get("ENKETO_ENCRYPTION_KEY") # If the key wasn't in the environment, attempt to get it from disk. - secrets_dir_path = os.path.join(CURRENT_DIR_PATH, 'secrets/') - encryption_key_file_path = os.path.join(secrets_dir_path, 'enketo_encryption_key.txt') + secrets_dir_path = os.path.join(CURRENT_DIR_PATH, "secrets/") + encryption_key_file_path = os.path.join( + secrets_dir_path, "enketo_encryption_key.txt" + ) if not encryption_key and os.path.isfile(encryption_key_file_path): - with open(encryption_key_file_path, 'r') as encryption_key_file: - encryption_key= encryption_key_file.read().strip() + with open(encryption_key_file_path) as encryption_key_file: + encryption_key = encryption_key_file.read().strip() # If the key couldn't be retrieved from disk, generate and store a new one. elif not encryption_key: - encryption_key= base64.b64encode(os.urandom(256)) + encryption_key = base64.b64encode(os.urandom(256)) if not os.path.isdir(secrets_dir_path): os.mkdir(secrets_dir_path) - with open(encryption_key_file_path, 'w') as encryption_key_file: + with open(encryption_key_file_path, "w") as encryption_key_file: encryption_key_file.write(encryption_key) return encryption_key @@ -32,41 +33,53 @@ def get_or_create_encryption_key(): def create_config(): - CONFIG_FILE_PATH = os.path.join(PROJECT_ROOT_PATH, 'config/config.json') + CONFIG_FILE_PATH = os.path.join(PROJECT_ROOT_PATH, "config/config.json") if not os.path.isfile(CONFIG_FILE_PATH): - raise EnvironmentError('No Enketo Express configuration found at `{}`.'.format(CONFIG_FILE_PATH)) + raise OSError(f"No Enketo Express configuration found at `{CONFIG_FILE_PATH}`.") else: try: - with open(CONFIG_FILE_PATH, 'r') as config_file: + with open(CONFIG_FILE_PATH) as config_file: config = json.loads(config_file.read()) except: - raise ValueError('Could not parse JSON content from `{}`.').format(CONFIG_FILE_PATH) + raise ValueError("Could not parse JSON content from `{}`.").format( + CONFIG_FILE_PATH + ) # Ensure an API key was set, retrieving it from the environment as a fallback. - config.setdefault('linked form and data server', dict()).setdefault('api key', os.environ.get('ENKETO_API_KEY')) - if not config['linked form and data server']['api key']: - raise EnvironmentError('An API key for Enketo Express is required.') + config.setdefault("linked form and data server", dict()).setdefault( + "api key", os.environ.get("ENKETO_API_KEY") + ) + if not config["linked form and data server"]["api key"]: + raise OSError("An API key for Enketo Express is required.") # Retrieve/generate the encryption key if not present. - config['linked form and data server'].setdefault('encryption key', get_or_create_encryption_key()) + config["linked form and data server"].setdefault( + "encryption key", get_or_create_encryption_key() + ) # Set the Docker Redis settings. 
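Reviewer note on the create_config.py hunks above: two pre-existing problems survive the reformatting. get_or_create_encryption_key() writes the raw bytes returned by base64.b64encode() into a file opened in text mode, which raises TypeError on Python 3, and the JSON error path calls .format() on the ValueError instance rather than on the message string, which raises AttributeError before the intended error ever surfaces. A minimal sketch of both corrections — the function names and the narrowed except clause are illustrative, not part of the patch:

    import base64
    import json
    import os

    def generate_encryption_key() -> str:
        # b64encode() returns bytes; decode so the value can be written to a
        # text-mode file and compared with the str read back from disk later.
        return base64.b64encode(os.urandom(256)).decode("ascii")

    def load_config(config_file_path: str) -> dict:
        try:
            with open(config_file_path) as config_file:
                return json.load(config_file)
        except ValueError:
            # Format the message, not the exception object; the original
            # `raise ValueError("...").format(path)` fails with AttributeError.
            raise ValueError(f"Could not parse JSON content from `{config_file_path}`.")
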
- config.setdefault('redis', dict()).setdefault('main', dict()).setdefault('host', 'redis_main') - config['redis'].setdefault('cache', dict()).setdefault('host', 'redis_cache') - config['redis']['cache'].setdefault('port', '6379') - - config['redis']['cache']['password'] = os.environ.get("ENKETO_REDIS_CACHE_PASSWORD", "") - config['redis']['main']['password'] = os.environ.get("ENKETO_REDIS_MAIN_PASSWORD", "") + config.setdefault("redis", dict()).setdefault("main", dict()).setdefault( + "host", "redis_main" + ) + config["redis"].setdefault("cache", dict()).setdefault("host", "redis_cache") + config["redis"]["cache"].setdefault("port", "6379") + + config["redis"]["cache"]["password"] = os.environ.get( + "ENKETO_REDIS_CACHE_PASSWORD", "" + ) + config["redis"]["main"]["password"] = os.environ.get( + "ENKETO_REDIS_MAIN_PASSWORD", "" + ) # Write the potentially-updated config file to disk. - with open(CONFIG_FILE_PATH, 'w') as config_file: + with open(CONFIG_FILE_PATH, "w") as config_file: config_file.write( # Sort keys so that the file remains consistent between runs. # Indent for readability. Specify separators to avoid trailing # whitespace (https://bugs.python.org/issue16333) - json.dumps(config, indent=4, separators=(',', ': '), sort_keys=True) + json.dumps(config, indent=4, separators=(",", ": "), sort_keys=True) ) -if __name__ == '__main__': +if __name__ == "__main__": create_config() diff --git a/fabfile.py b/fabfile.py index 2dfcb06b3c..a0e28af425 100644 --- a/fabfile.py +++ b/fabfile.py @@ -1,4 +1,3 @@ -from __future__ import unicode_literals from fabric.api import local from fabric.context_managers import shell_env @@ -21,7 +20,7 @@ def managepy(command=""): """ Run specified manage.py command """ - cmd = "docker-compose exec backend python manage.py {}".format(command) + cmd = f"docker-compose exec backend python manage.py {command}" local(cmd) @@ -49,9 +48,7 @@ def tests(test_path=""): """ local( "docker-compose exec backend " - "python manage.py test {} --parallel --noinput".format( - test_path - ) + "python manage.py test {} --parallel --noinput".format(test_path) ) diff --git a/get_version.py b/get_version.py index 3642f3971f..2dc0d16254 100755 --- a/get_version.py +++ b/get_version.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 import json -with open("frontend/package.json", "r") as f: +with open("frontend/package.json") as f: json_dict = json.load(f) frontend_version = json_dict["version"] print(frontend_version) diff --git a/update_version.py b/update_version.py index 36352cf97c..e4b581bec4 100755 --- a/update_version.py +++ b/update_version.py @@ -1,10 +1,11 @@ #!/usr/bin/env python3 -from tomlkit.api import loads import json -with open("backend/pyproject.toml", "r") as f: +from tomlkit.api import loads + +with open("backend/pyproject.toml") as f: toml_dict = loads(f.read()) -with open("frontend/package.json", "r") as f: +with open("frontend/package.json") as f: json_dict = json.load(f) backend_version = toml_dict["tool"]["poetry"]["version"] frontend_version = json_dict["version"] From 9bb1a0af98f92ca60ee8c598a6752827ddca6878 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Wed, 2 Feb 2022 13:36:02 +0000 Subject: [PATCH 02/24] Updated admin --- .../hct_mis_api/apps/administration/admin.py | 2 +- .../apps/cash_assist_datahub/admin.py | 28 +- backend/hct_mis_api/apps/core/admin.py | 134 +-- backend/hct_mis_api/apps/erp_datahub/admin.py | 4 +- backend/hct_mis_api/apps/grievance/admin.py | 5 +- backend/hct_mis_api/apps/household/admin.py | 158 
+--- backend/hct_mis_api/apps/mis_datahub/admin.py | 25 +- backend/hct_mis_api/apps/payment/admin.py | 2 +- backend/hct_mis_api/apps/program/admin.py | 4 +- .../apps/registration_datahub/admin.py | 10 +- backend/hct_mis_api/apps/targeting/admin.py | 4 +- backend/hct_mis_api/settings/base.py | 22 +- backend/poetry.lock | 893 +++++++++++------- backend/pyproject.toml | 14 +- 14 files changed, 667 insertions(+), 638 deletions(-) diff --git a/backend/hct_mis_api/apps/administration/admin.py b/backend/hct_mis_api/apps/administration/admin.py index 8bba37679b..0b0bef6379 100644 --- a/backend/hct_mis_api/apps/administration/admin.py +++ b/backend/hct_mis_api/apps/administration/admin.py @@ -22,7 +22,7 @@ class LogEntryAdmin(AdminAdvancedFiltersMixin, SmartLogEntryAdmin): list_filter = ( ("user", AutoCompleteFilter), ("content_type", AutoCompleteFilter), - ForeignKeyFieldFilter.factory("object_id"), + ("object_id", ForeignKeyFieldFilter.factory(title="Object Id")), "action_time", "action_flag", ) diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/admin.py b/backend/hct_mis_api/apps/cash_assist_datahub/admin.py index ba4df6107d..d53c59cd98 100644 --- a/backend/hct_mis_api/apps/cash_assist_datahub/admin.py +++ b/backend/hct_mis_api/apps/cash_assist_datahub/admin.py @@ -207,8 +207,8 @@ class CashPlanAdmin(ExtraUrlMixin, HOPEModelAdminBase): list_display = ("session", "name", "status", "business_area", "cash_plan_id") list_filter = ( "status", - TextFieldFilter.factory("cash_plan_id"), - TextFieldFilter.factory("session__id"), + ("cash_plan_id", TextFieldFilter.factory(title="Cash plan Id")), + ("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter, ) date_hierarchy = "session__timestamp" @@ -233,9 +233,9 @@ class PaymentRecordAdmin(ExtraUrlMixin, admin.ModelAdmin): "status", "delivery_type", "service_provider_ca_id", - TextFieldFilter.factory("ca_id"), - TextFieldFilter.factory("cash_plan_ca_id"), - TextFieldFilter.factory("session__id"), + ("ca_id", TextFieldFilter.factory(title="CA Id")), + ("cash_plan_ca_id", TextFieldFilter.factory(title="Cash plan Ca Id")), + ("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter, ) @@ -280,7 +280,7 @@ class ServiceProviderAdmin(HOPEModelAdminBase): raw_id_fields = ("session",) date_hierarchy = "session__timestamp" search_fields = ("full_name",) - list_filter = (TextFieldFilter.factory("session__id"), BusinessAreaFilter) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter) @admin.register(Programme) @@ -289,10 +289,10 @@ class ProgrammeAdmin(HOPEModelAdminBase): raw_id_fields = ("session",) date_hierarchy = "session__timestamp" list_filter = ( - TextFieldFilter.factory("session__id"), - TextFieldFilter.factory("ca_hash_id"), - TextFieldFilter.factory("mis_id"), - TextFieldFilter.factory("ca_id"), + ("session__id", TextFieldFilter.factory(title="Session Id")), + ("ca_hash_id", TextFieldFilter.factory(title="Ca hash Id")), + ("mis_id", TextFieldFilter.factory(title="MIS Id")), + ("ca_id", TextFieldFilter.factory(title="CA Id")), ) @@ -302,8 +302,8 @@ class TargetPopulationAdmin(HOPEModelAdminBase): raw_id_fields = ("session",) date_hierarchy = "session__timestamp" list_filter = ( - TextFieldFilter.factory("session__id"), - TextFieldFilter.factory("ca_hash_id"), - TextFieldFilter.factory("mis_id"), - TextFieldFilter.factory("ca_id"), + ("session__id", TextFieldFilter.factory(title="Session Id")), + ("ca_hash_id", TextFieldFilter.factory(title="Ca hash Id")), + 
("mis_id", TextFieldFilter.factory(title="MIS Id")), + ("ca_id", TextFieldFilter.factory(title="CA Id")), ) diff --git a/backend/hct_mis_api/apps/core/admin.py b/backend/hct_mis_api/apps/core/admin.py index 900fb9b357..e4dd9fe791 100644 --- a/backend/hct_mis_api/apps/core/admin.py +++ b/backend/hct_mis_api/apps/core/admin.py @@ -72,9 +72,7 @@ class TestRapidproForm(forms.Form): label="Phone number", required=True, ) - flow_name = forms.CharField( - label="Name of the test flow", initial="Test", required=True - ) + flow_name = forms.CharField(label="Name of the test flow", initial="Test", required=True) class BusinessOfficeCodeValidator(RegexValidator): @@ -199,11 +197,7 @@ def split_business_area(self, request, pk): def _get_doap_matrix(self, obj): matrix = [] - ca_roles = ( - Role.objects.filter(subsystem=Role.CA) - .order_by("name") - .values_list("name", flat=True) - ) + ca_roles = Role.objects.filter(subsystem=Role.CA).order_by("name").values_list("name", flat=True) fields = ["org", "Last Name", "First Name", "Email", "Action"] + list(ca_roles) matrix.append(fields) all_user_data = {} @@ -211,9 +205,7 @@ def _get_doap_matrix(self, obj): user_data = {} if member.user.pk not in all_user_data: user_roles = list( - member.user.user_roles.filter(role__subsystem="CA").values_list( - "role__name", flat=True - ) + member.user.user_roles.filter(role__subsystem="CA").values_list("role__name", flat=True) ) user_data["org"] = member.user.partner.name user_data["Last Name"] = member.user.last_name @@ -226,11 +218,7 @@ def _get_doap_matrix(self, obj): # user_data["user_roles"] = user_roles all_user_data[member.user.pk] = user_data - values = { - key: value - for (key, value) in user_data.items() - if key not in ["action"] - } + values = {key: value for (key, value) in user_data.items() if key not in ["action"]} signature = str(hash(frozenset(values.items()))) user_data["signature"] = signature @@ -259,9 +247,7 @@ def force_sync_doap(self, request, pk): matrix = self._get_doap_matrix(obj) for row in matrix[1:]: User.objects.filter(email=row["Email"]).update(doap_hash=row["signature"]) - return HttpResponseRedirect( - reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk]) - ) + return HttpResponseRedirect(reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk])) @button(label="Send DOAP", group="doap") def send_doap(self, request, pk): @@ -274,9 +260,7 @@ def send_doap(self, request, pk): writer.writeheader() for row in matrix[1:]: writer.writerow(row) - recipients = [request.user.email] + config.CASHASSIST_DOAP_RECIPIENT.split( - ";" - ) + recipients = [request.user.email] + config.CASHASSIST_DOAP_RECIPIENT.split(";") self.log_change(request, obj, f'DOAP sent to {", ".join(recipients)}') buffer.seek(0) mail = EmailMessage( @@ -290,21 +274,15 @@ def send_doap(self, request, pk): if row["Action"] == "REMOVE": User.objects.filter(email=row["Email"]).update(doap_hash="") else: - User.objects.filter(email=row["Email"]).update( - doap_hash=row["signature"] - ) + User.objects.filter(email=row["Email"]).update(doap_hash=row["signature"]) obj.custom_fields.update({"hope": {"last_doap_sync": str(timezone.now())}}) obj.save() - self.message_user( - request, f'Email sent to {", ".join(recipients)}', messages.SUCCESS - ) + self.message_user(request, f'Email sent to {", ".join(recipients)}', messages.SUCCESS) except Exception as e: logger.exception(e) self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) - return HttpResponseRedirect( - 
reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk]) - ) + return HttpResponseRedirect(reverse("admin:core_businessarea_view_ca_doap", args=[obj.pk])) @button(label="Export DOAP", group="doap", permission="can_export_doap") def export_doap(self, request, pk): @@ -364,9 +342,7 @@ def _test_rapidpro_connection(self, request, pk): context["phone_number"] = phone_number context["flow_name"] = flow_name - error, response = api.test_connection_start_flow( - flow_name, phone_number - ) + error, response = api.test_connection_start_flow(flow_name, phone_number) if response: context["flow_uuid"] = response["flow"]["uuid"] context["flow_status"] = response["status"] @@ -377,9 +353,7 @@ def _test_rapidpro_connection(self, request, pk): else: messages.success(request, "Connection successful") except Exception as e: - self.message_user( - request, f"{e.__class__.__name__}: {e}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) context["form"] = form return TemplateResponse(request, "core/test_rapidpro.html", context) @@ -399,9 +373,7 @@ def mark_submissions(self, request, pk): except Exception as e: logger.exception(e) self.message_user(request, str(e), messages.ERROR) - return HttpResponseRedirect( - reverse("admin:core_businessarea_change", args=[business_area.id]) - ) + return HttpResponseRedirect(reverse("admin:core_businessarea_change", args=[business_area.id])) else: return _confirm_action( self, @@ -422,9 +394,7 @@ class CountryFilter(SimpleListFilter): parameter_name = "country" def lookups(self, request, model_admin): - return AdminArea.objects.filter(admin_area_level__admin_level=0).values_list( - "id", "title" - ) + return AdminArea.objects.filter(admin_area_level__admin_level=0).values_list("id", "title") def value(self): return self.used_parameters.get(self.parameter_name) @@ -488,42 +458,30 @@ def load_from_datamart(self, request): logger.exception(e) if admin_areas_country_name: for admin_area, country_name in admin_areas_country_name: - AdminAreaLevel.objects.filter(country_name=country_name).update( - country=admin_area - ) + AdminAreaLevel.objects.filter(country_name=country_name).update(country=admin_area) class LoadAdminAreaForm(forms.Form): # country = forms.ChoiceField(choices=AdminAreaLevel.objects.get_countries()) - country = forms.ModelChoiceField( - queryset=AdminAreaLevel.objects.filter(admin_level=0).order_by("country_name") - ) + country = forms.ModelChoiceField(queryset=AdminAreaLevel.objects.filter(admin_level=0).order_by("country_name")) geometries = forms.BooleanField(required=False) run_in_background = forms.BooleanField(required=False) page_size = forms.IntegerField(required=True, validators=[lambda x: x >= 1]) - max_records = forms.IntegerField( - required=False, help_text="Leave blank for all records" - ) + max_records = forms.IntegerField(required=False, help_text="Leave blank for all records") - skip_rebuild = forms.BooleanField( - required=False, help_text="Do not rebuild MPTT tree" - ) + skip_rebuild = forms.BooleanField(required=False, help_text="Do not rebuild MPTT tree") class ExportLocationsForm(forms.Form): country = forms.ModelChoiceField( - queryset=AdminArea.objects.filter(admin_area_level__admin_level=0).order_by( - "title" - ) + queryset=AdminArea.objects.filter(admin_area_level__admin_level=0).order_by("title") ) class ImportAreaForm(forms.Form): # country = forms.ChoiceField(choices=AdminAreaLevel.objects.get_countries()) - country = forms.ModelChoiceField( - 
queryset=AdminArea.objects.filter(admin_area_level__admin_level=0) - ) + country = forms.ModelChoiceField(queryset=AdminArea.objects.filter(admin_area_level__admin_level=0)) file = forms.FileField() @@ -542,8 +500,8 @@ class AdminAreaAdmin(ExtraUrlMixin, MPTTModelAdmin): list_filter = ( AdminLevelFilter, CountryFilter, - TextFieldFilter.factory("tree_id"), - TextFieldFilter.factory("external_id"), + ("tree_id", TextFieldFilter.factory(title="Tree Id")), + ("external_id", TextFieldFilter.factory(title="External Id")), ) @button(permission=lambda r, __: r.user.is_superuser) @@ -551,9 +509,7 @@ def rebuild_tree(self, request): try: AdminArea.objects.rebuild() except Exception as e: - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) @button(permission="core.import_from_csv") def import_file(self, request): @@ -568,15 +524,10 @@ def import_file(self, request): csv_file = form.cleaned_data["file"] # If file is too large if csv_file.multiple_chunks(): - raise Exception( - "Uploaded file is too big (%.2f MB)" - % (csv_file.size(1000 * 1000)) - ) + raise Exception("Uploaded file is too big (%.2f MB)" % (csv_file.size(1000 * 1000))) data_set = csv_file.read().decode("utf-8-sig").splitlines() - reader = csv.DictReader( - data_set, quoting=csv.QUOTE_NONE, delimiter=";" - ) + reader = csv.DictReader(data_set, quoting=csv.QUOTE_NONE, delimiter=";") provided = set(reader.fieldnames) minimum_set = { "area_code", @@ -585,9 +536,7 @@ def import_file(self, request): "area_name", } if not minimum_set.issubset(provided): - raise Exception( - f"Invalid columns {reader.fieldnames}. {provided.difference(minimum_set)}" - ) + raise Exception(f"Invalid columns {reader.fieldnames}. 
{provided.difference(minimum_set)}") lines = [] infos = {"skipped": 0} # country = form.cleaned_data['country'] @@ -613,9 +562,7 @@ def import_file(self, request): p_code=row["parent_area_code"], ).first() if parent is None: - assert ( - level_number == 0 - ), f"Cannot find parent area for {row}" + assert level_number == 0, f"Cannot find parent area for {row}" AdminArea.objects.create( external_id=external_id, title=row["area_name"], @@ -643,9 +590,7 @@ def import_file(self, request): except Exception as e: logger.exception(e) context["form"] = form - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) else: context["form"] = form @@ -687,9 +632,7 @@ def load_from_datamart(self, request): except Exception as e: logger.exception(e) context["form"] = form - self.message_user( - request, f"{e.__class__.__name__}: {e}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) else: context["form"] = form @@ -711,9 +654,7 @@ def export_locations(self, request): except Exception as e: logger.exception(e) context["form"] = form - self.message_user( - request, f"{e.__class__.__name__}: {e}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {e}", messages.ERROR) else: context["form"] = form return TemplateResponse(request, "core/admin/export_locations.html", context) @@ -864,14 +805,9 @@ def add_view(self, request, form_url="", extra_context=None): "survey_sheet": wb.sheet_by_name("survey"), "choices_sheet": wb.sheet_by_name("choices"), } - validation_errors = KoboTemplateValidator.validate_kobo_template( - **sheets - ) + validation_errors = KoboTemplateValidator.validate_kobo_template(**sheets) if validation_errors: - errors = [ - f"Field: {error['field']} - {error['message']}" - for error in validation_errors - ] + errors = [f"Field: {error['field']} - {error['message']}" for error in validation_errors] form.add_error(field=None, error=errors) except ValidationError as validation_error: logger.exception(validation_error) @@ -902,14 +838,10 @@ def add_view(self, request, form_url="", extra_context=None): return TemplateResponse(request, "core/xls_form.html", payload) def change_view(self, request, object_id=None, form_url="", extra_context=None): - extra_context = dict( - show_save=False, show_save_and_continue=False, show_delete=True - ) + extra_context = dict(show_save=False, show_save_and_continue=False, show_delete=True) has_add_permission = self.has_add_permission self.has_add_permission = lambda __: False - template_response = super().change_view( - request, object_id, form_url, extra_context - ) + template_response = super().change_view(request, object_id, form_url, extra_context) self.has_add_permission = has_add_permission return template_response diff --git a/backend/hct_mis_api/apps/erp_datahub/admin.py b/backend/hct_mis_api/apps/erp_datahub/admin.py index 1e8c79b706..cf6306f7a5 100644 --- a/backend/hct_mis_api/apps/erp_datahub/admin.py +++ b/backend/hct_mis_api/apps/erp_datahub/admin.py @@ -109,7 +109,7 @@ class FundsCommitmentAdmin(ExtraUrlMixin, HOPEModelAdminBase): SplitBusinessAreaFilter, "mis_sync_date", "ca_sync_date", - TextFieldFilter.factory("business_area"), + ("business_area", TextFieldFilter.factory(title="Business Area")), ) date_hierarchy = "create_date" form = FundsCommitmentAddForm @@ -201,7 +201,7 @@ class DownPaymentAdmin(ExtraUrlMixin, HOPEModelAdminBase): list_filter = ( "mis_sync_date", 
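Reviewer note: the admin hunks above and below all apply the same mechanical change — positional Filter.factory("field", "Label") calls become standard Django (field_lookup, filter_class) pairs in list_filter, with the label passed to the factory as title=, in line with the django-adminfilters 1.8 -> 1.9 bump further down in poetry.lock. A minimal sketch of the new style; the model admin class, field lookup and import path are illustrative, assumed from how these modules use the filter:

    from adminfilters.filters import TextFieldFilter
    from django.contrib import admin

    class SessionAdmin(admin.ModelAdmin):
        list_filter = (
            # New style: a (lookup, filter class) pair; the label goes to the
            # factory as a keyword instead of a second positional argument.
            ("session__id", TextFieldFilter.factory(title="Session Id")),
        )
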
"ca_sync_date", - TextFieldFilter.factory("business_area"), + ("business_area", TextFieldFilter.factory(title="Business Area")), ) form = DownPaymentAddForm date_hierarchy = "create_date" diff --git a/backend/hct_mis_api/apps/grievance/admin.py b/backend/hct_mis_api/apps/grievance/admin.py index 63e1103bd6..2f029bea9d 100644 --- a/backend/hct_mis_api/apps/grievance/admin.py +++ b/backend/hct_mis_api/apps/grievance/admin.py @@ -50,9 +50,8 @@ class GrievanceTicketAdmin(LinkedObjectsMixin, ExtraUrlMixin, AdminAdvancedFilte ("registration_data_import", AutoCompleteFilter), ("created_by", AutoCompleteFilter), ("assigned_to", AutoCompleteFilter), - # TextFieldFilter.factory("created_by__username__istartswith"), - # TextFieldFilter.factory("created_by__username__istartswith"), - # TextFieldFilter.factory("assigned_to__username__istartswith"), + # ("created_by__username__istartswith", TextFieldFilter.factory(title="Create by")), + # ("assigned_to__username__istartswith", TextFieldFilter.factory(title="Assigned to")), "updated_at", ) advanced_filter_fields = ( diff --git a/backend/hct_mis_api/apps/household/admin.py b/backend/hct_mis_api/apps/household/admin.py index bac91c0ca9..6c8f3184ae 100644 --- a/backend/hct_mis_api/apps/household/admin.py +++ b/backend/hct_mis_api/apps/household/admin.py @@ -123,9 +123,9 @@ class HouseholdAdmin( "size", ) list_filter = ( - MultiValueTextFieldFilter.factory("unicef_id", "UNICEF ID"), - MultiValueTextFieldFilter.factory("unhcr_id", "UNHCR ID"), - MultiValueTextFieldFilter.factory("id", "MIS ID"), + ("unicef_id", MultiValueTextFieldFilter.factory(title="UNICEF ID")), + ("unhcr_id", MultiValueTextFieldFilter.factory(title="UNHCR ID")), + ("id", MultiValueTextFieldFilter.factory(title="MIS ID")), # ("country", ChoicesFieldComboFilter), ("business_area", AutoCompleteFilter), ("size", MaxMinFilter), @@ -186,13 +186,9 @@ def withdrawn(self, request, pk): context["status"] = new_withdrawn_status tickets = GrievanceTicket.objects.belong_household(obj) if obj.withdrawn: - tickets = filter( - lambda t: t.ticket.extras.get("status_before_withdrawn", False), tickets - ) + tickets = filter(lambda t: t.ticket.extras.get("status_before_withdrawn", False), tickets) else: - tickets = filter( - lambda t: t.ticket.status != GrievanceTicket.STATUS_CLOSED, tickets - ) + tickets = filter(lambda t: t.ticket.status != GrievanceTicket.STATUS_CLOSED, tickets) context["tickets"] = tickets if request.method == "POST": @@ -207,17 +203,13 @@ def withdrawn(self, request, pk): message = "{} has been restored" obj.withdrawn = withdrawn withdrawns = list(obj.individuals.values_list("id", flat=True)) - for ind in Individual.objects.filter( - id__in=withdrawns, duplicate=False - ): + for ind in Individual.objects.filter(id__in=withdrawns, duplicate=False): ind.withdrawn = withdrawn ind.save() self.log_change(request, ind, message.format("Individual")) for tkt in context["tickets"]: if withdrawn: - tkt.ticket.extras[ - "status_before_withdrawn" - ] = tkt.ticket.status + tkt.ticket.extras["status_before_withdrawn"] = tkt.ticket.status tkt.ticket.status = GrievanceTicket.STATUS_CLOSED self.log_change( request, @@ -226,9 +218,7 @@ def withdrawn(self, request, pk): ) else: if tkt.ticket.extras.get("status_before_withdrawn"): - tkt.ticket.status = tkt.ticket.extras[ - "status_before_withdrawn" - ] + tkt.ticket.status = tkt.ticket.extras["status_before_withdrawn"] tkt.ticket.extras["status_before_withdrawn"] = "" self.log_change( request, @@ -243,23 +233,17 @@ def withdrawn(self, request, pk): except 
Exception as e: self.message_user(request, str(e), messages.ERROR) - return TemplateResponse( - request, "admin/household/household/withdrawn.html", context - ) + return TemplateResponse(request, "admin/household/household/withdrawn.html", context) @button() def tickets(self, request, pk): context = self.get_common_context(request, pk, title="Tickets") obj = context["original"] tickets = [] - for entry in chain( - obj.sensitive_ticket_details.all(), obj.complaint_ticket_details.all() - ): + for entry in chain(obj.sensitive_ticket_details.all(), obj.complaint_ticket_details.all()): tickets.append(entry.ticket) context["tickets"] = tickets - return TemplateResponse( - request, "admin/household/household/tickets.html", context - ) + return TemplateResponse(request, "admin/household/household/tickets.html", context) @button() def members(self, request, pk): @@ -276,15 +260,11 @@ def sanity_check(self, request, pk): primary = None head = None try: - primary = IndividualRoleInHousehold.objects.get( - household=hh, role=ROLE_PRIMARY - ) + primary = IndividualRoleInHousehold.objects.get(household=hh, role=ROLE_PRIMARY) except IndividualRoleInHousehold.DoesNotExist: warnings.append([messages.ERROR, "Head of househould not found"]) - alternate = IndividualRoleInHousehold.objects.filter( - household=hh, role=ROLE_ALTERNATE - ).first() + alternate = IndividualRoleInHousehold.objects.filter(household=hh, role=ROLE_ALTERNATE).first() try: head = hh.individuals.get(relationship=HEAD) except IndividualRoleInHousehold.DoesNotExist: @@ -296,12 +276,8 @@ def sanity_check(self, request, pk): field = f"{gender}_age_group_{num_range}_count" total_in_ranges += getattr(hh, field, 0) or 0 - active_individuals = hh.individuals.exclude( - Q(duplicate=True) | Q(withdrawn=True) - ) - ghosts_individuals = hh.individuals.filter( - Q(duplicate=True) | Q(withdrawn=True) - ) + active_individuals = hh.individuals.exclude(Q(duplicate=True) | Q(withdrawn=True)) + ghosts_individuals = hh.individuals.filter(Q(duplicate=True) | Q(withdrawn=True)) all_individuals = hh.individuals.all() if hh.collect_individual_data: if active_individuals.count() != hh.size: @@ -309,9 +285,7 @@ def sanity_check(self, request, pk): else: if all_individuals.count() > 1: - warnings.append( - [messages.ERROR, "Individual data not collected but members found"] - ) + warnings.append([messages.ERROR, "Individual data not collected but members found"]) if hh.size != total_in_ranges: warnings.append( @@ -337,9 +311,7 @@ def sanity_check(self, request, pk): "alternate": alternate, "warnings": [(DEFAULT_TAGS[w[0]], w[1]) for w in warnings], } - return TemplateResponse( - request, "admin/household/household/sanity_check.html", context - ) + return TemplateResponse(request, "admin/household/household/sanity_check.html", context) class IndividualRoleInHouseholdInline(TabularInline): @@ -387,8 +359,8 @@ class IndividualAdmin( exclude = ("created_at", "updated_at") inlines = [IndividualRoleInHouseholdInline] list_filter = ( - TextFieldFilter.factory("unicef_id__iexact", "UNICEF ID"), - TextFieldFilter.factory("household__unicef_id__iexact", "Household ID"), + ("unicef_id__iexact", TextFieldFilter.factory(title="UNICEF ID")), + ("household__unicef_id__iexact", TextFieldFilter.factory(title="Household ID")), ("deduplication_golden_record_status", ChoicesFieldComboFilter), ("deduplication_batch_status", ChoicesFieldComboFilter), ("business_area", AutoCompleteFilter), @@ -453,9 +425,7 @@ def formfield_for_dbfield(self, db_field, request, **kwargs): def 
household_members(self, request, pk): obj = Individual.objects.get(pk=pk) url = reverse("admin:household_individual_changelist") - return HttpResponseRedirect( - f"{url}?household|unicef_id|iexact={obj.household.unicef_id}" - ) + return HttpResponseRedirect(f"{url}?household|unicef_id|iexact={obj.household.unicef_id}") @button() def sanity_check(self, request, pk): @@ -464,9 +434,7 @@ def sanity_check(self, request, pk): context["roles"] = obj.households_and_roles.all() context["duplicates"] = Individual.objects.filter(unicef_id=obj.unicef_id) - return TemplateResponse( - request, "admin/household/individual/sanity_check.html", context - ) + return TemplateResponse(request, "admin/household/individual/sanity_check.html", context) @admin.register(IndividualRoleInHousehold) @@ -492,8 +460,8 @@ class EntitlementCardAdmin(ExtraUrlMixin, HOPEModelAdminBase): raw_id_fields = ("household",) list_filter = ( "status", - TextFieldFilter.factory("card_type"), - TextFieldFilter.factory("service_provider"), + ("card_type", TextFieldFilter.factory(title="Card Type")), + ("service_provider", TextFieldFilter.factory(title="Service Provider")), ) @@ -528,9 +496,7 @@ def xlsx_update_stage2(self, request, old_form): title="Update Individual by xlsx", form=UpdateByXlsxStage1Form(), ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update.html", context - ) + return TemplateResponse(request, "admin/household/individual/xlsx_update.html", context) context = self.get_common_context( request, @@ -540,15 +506,11 @@ def xlsx_update_stage2(self, request, old_form): initial={"xlsx_update_file": xlsx_update_file}, ), ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update_stage2.html", context - ) + return TemplateResponse(request, "admin/household/individual/xlsx_update_stage2.html", context) def xlsx_update_stage3(self, request, old_form): xlsx_update_file = old_form.cleaned_data["xlsx_update_file"] - xlsx_update_file.xlsx_match_columns = old_form.cleaned_data[ - "xlsx_match_columns" - ] + xlsx_update_file.xlsx_match_columns = old_form.cleaned_data["xlsx_match_columns"] xlsx_update_file.save() updater = IndividualXlsxUpdate(xlsx_update_file) report = updater.get_matching_report() @@ -556,15 +518,11 @@ def xlsx_update_stage3(self, request, old_form): request, title="Update Individual by xlsx Report", unique_report_rows=report[IndividualXlsxUpdate.STATUS_UNIQUE], - multiple_match_report_rows=report[ - IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH - ], + multiple_match_report_rows=report[IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH], no_match_report_rows=report[IndividualXlsxUpdate.STATUS_NO_MATCH], xlsx_update_file=xlsx_update_file.id, ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update_stage3.html", context - ) + return TemplateResponse(request, "admin/household/individual/xlsx_update_stage3.html", context) def add_view(self, request, form_url="", extra_context=None): return self.xlsx_update(request) @@ -572,52 +530,30 @@ def add_view(self, request, form_url="", extra_context=None): def xlsx_update(self, request): if request.method == "GET": form = UpdateByXlsxStage1Form() - form.fields["registration_data_import"].widget = AutocompleteWidget( - RegistrationDataImport, self.admin_site - ) - form.fields["business_area"].widget = AutocompleteWidget( - BusinessArea, self.admin_site - ) - context = self.get_common_context( - request, title="Update Individual by xlsx", form=form - ) + form.fields["registration_data_import"].widget = 
AutocompleteWidget(RegistrationDataImport, self.admin_site) + form.fields["business_area"].widget = AutocompleteWidget(BusinessArea, self.admin_site) + context = self.get_common_context(request, title="Update Individual by xlsx", form=form) elif request.POST.get("stage") == "2": form = UpdateByXlsxStage1Form(request.POST, request.FILES) - context = self.get_common_context( - request, title="Update Individual by xlsx", form=form - ) + context = self.get_common_context(request, title="Update Individual by xlsx", form=form) if form.is_valid(): try: return self.xlsx_update_stage2(request, form) except Exception as e: - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update.html", context - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + return TemplateResponse(request, "admin/household/individual/xlsx_update.html", context) elif request.POST.get("stage") == "3": - xlsx_update_file = XlsxUpdateFile.objects.get( - pk=request.POST["xlsx_update_file"] - ) + xlsx_update_file = XlsxUpdateFile.objects.get(pk=request.POST["xlsx_update_file"]) updater = IndividualXlsxUpdate(xlsx_update_file) - form = UpdateByXlsxStage2Form( - request.POST, request.FILES, xlsx_columns=updater.columns_names - ) - context = self.get_common_context( - request, title="Update Individual by xlsx", form=form - ) + form = UpdateByXlsxStage2Form(request.POST, request.FILES, xlsx_columns=updater.columns_names) + context = self.get_common_context(request, title="Update Individual by xlsx", form=form) if form.is_valid(): try: return self.xlsx_update_stage3(request, form) except Exception as e: - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update_stage2.html", context - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) + return TemplateResponse(request, "admin/household/individual/xlsx_update_stage2.html", context) elif request.POST.get("stage") == "4": xlsx_update_file_id = request.POST.get("xlsx_update_file") @@ -627,21 +563,15 @@ def xlsx_update(self, request): with transaction.atomic(): updater.update_individuals() self.message_user(request, "Done", messages.SUCCESS) - return HttpResponseRedirect( - reverse("admin:household_individual_changelist") - ) + return HttpResponseRedirect(reverse("admin:household_individual_changelist")) except Exception as e: - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) report = updater.report_dict context = self.get_common_context( request, title="Update Individual by xlsx Report", unique_report_rows=report[IndividualXlsxUpdate.STATUS_UNIQUE], - multiple_match_report_rows=report[ - IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH - ], + multiple_match_report_rows=report[IndividualXlsxUpdate.STATUS_MULTIPLE_MATCH], no_match_report_rows=report[IndividualXlsxUpdate.STATUS_NO_MATCH], xlsx_update_file=xlsx_update_file.id, ) @@ -651,6 +581,4 @@ def xlsx_update(self, request): context, ) - return TemplateResponse( - request, "admin/household/individual/xlsx_update.html", context - ) + return TemplateResponse(request, "admin/household/individual/xlsx_update.html", context) diff --git a/backend/hct_mis_api/apps/mis_datahub/admin.py b/backend/hct_mis_api/apps/mis_datahub/admin.py index 
0204bf4d92..fb3a18941a 100644 --- a/backend/hct_mis_api/apps/mis_datahub/admin.py +++ b/backend/hct_mis_api/apps/mis_datahub/admin.py @@ -80,7 +80,10 @@ def truncate(self, request): @admin.register(Household) class HouseholdAdmin(HUBAdminMixin): - list_filter = (TextFieldFilter.factory("session__id"), TextFieldFilter.factory("business_area")) + list_filter = ( + ("session__id", TextFieldFilter.factory(title="Session Id")), + ("business_area", TextFieldFilter.factory(title="Business Area")), + ) raw_id_fields = ("session",) @href() @@ -106,10 +109,10 @@ class IndividualAdmin(HUBAdminMixin): list_display = ("session", "unicef_id", "mis_id", "household_mis_id", "family_name", "given_name") list_filter = ( BusinessAreaFilter, - TextFieldFilter.factory("session__id"), - TextFieldFilter.factory("unicef_id"), - TextFieldFilter.factory("mis_id"), - TextFieldFilter.factory("household_mis_id"), + ("session__id", TextFieldFilter.factory(title="Session Id")), + ("unicef_id", TextFieldFilter.factory(title="Unicef Id")), + ("mis_id", TextFieldFilter.factory(title="MIS Id")), + ("household_mis_id", TextFieldFilter.factory(title="Household MIS Id")), ) raw_id_fields = ("session",) @@ -133,7 +136,7 @@ class FundsCommitmentAdmin(HUBAdminMixin): class DownPaymentAdmin(HUBAdminMixin): filters = ( BusinessAreaFilter, - TextFieldFilter.factory("rec_serial_number"), + ("rec_serial_number", TextFieldFilter.factory(title="Rec Serial number")), "create_date", "mis_sync_flag", "ca_sync_flag", @@ -142,7 +145,7 @@ class DownPaymentAdmin(HUBAdminMixin): @admin.register(IndividualRoleInHousehold) class IndividualRoleInHouseholdAdmin(HUBAdminMixin): - list_filter = (TextFieldFilter.factory("session__id"),) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")),) @admin.register(Session) @@ -240,14 +243,14 @@ def reset_sync_date(self, request, pk): @admin.register(TargetPopulationEntry) class TargetPopulationEntryAdmin(HUBAdminMixin): - list_filter = (TextFieldFilter.factory("session__id"),) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")),) raw_id_fields = ("session",) @admin.register(TargetPopulation) class TargetPopulationAdmin(HUBAdminMixin): # list_display = ('name', ) - list_filter = (TextFieldFilter.factory("session__id"), BusinessAreaFilter) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter) raw_id_fields = ("session",) search_fields = ("name",) @@ -276,7 +279,7 @@ def households(self, button): @admin.register(Program) class ProgramAdmin(HUBAdminMixin): - list_filter = (TextFieldFilter.factory("session__id"), BusinessAreaFilter) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter) search_fields = ("name",) raw_id_fields = ("session",) @@ -284,5 +287,5 @@ class ProgramAdmin(HUBAdminMixin): @admin.register(Document) class DocumentAdmin(HUBAdminMixin): list_display = ("type", "number") - list_filter = (TextFieldFilter.factory("session__id"), BusinessAreaFilter) + list_filter = (("session__id", TextFieldFilter.factory(title="Session Id")), BusinessAreaFilter) raw_id_fields = ("session",) diff --git a/backend/hct_mis_api/apps/payment/admin.py b/backend/hct_mis_api/apps/payment/admin.py index 679f462940..6a9eaa28e1 100644 --- a/backend/hct_mis_api/apps/payment/admin.py +++ b/backend/hct_mis_api/apps/payment/admin.py @@ -109,7 +109,7 @@ class PaymentVerificationAdmin(HOPEModelAdminBase): ("status", ChoicesFieldComboFilter), ("cash_plan_payment_verification__cash_plan", 
AutoCompleteFilter), ("cash_plan_payment_verification__cash_plan__business_area", AutoCompleteFilter), - TextFieldFilter.factory("payment_record__household__unicef_id", "Household ID"), + ("payment_record__household__unicef_id", TextFieldFilter.factory(title="Household ID")), ) date_hierarchy = "updated_at" raw_id_fields = ("payment_record", "cash_plan_payment_verification") diff --git a/backend/hct_mis_api/apps/program/admin.py b/backend/hct_mis_api/apps/program/admin.py index 31ef16eb8f..2af1b06ac2 100644 --- a/backend/hct_mis_api/apps/program/admin.py +++ b/backend/hct_mis_api/apps/program/admin.py @@ -39,8 +39,8 @@ class CashPlanAdmin(ExtraUrlMixin, HOPEModelAdminBase): ("business_area", AutoCompleteFilter), ("delivery_type", ChoicesFieldComboFilter), ("verification_status", ChoicesFieldComboFilter), - TextFieldFilter.factory("program__id", "Program ID"), - TextFieldFilter.factory("vision_id", "Vision ID"), + ("program__id", TextFieldFilter.factory(title="Program ID")), + ("vision_id", TextFieldFilter.factory(title="Vision ID")), ) raw_id_fields = ("business_area", "program", "service_provider") search_fields = ("name",) diff --git a/backend/hct_mis_api/apps/registration_datahub/admin.py b/backend/hct_mis_api/apps/registration_datahub/admin.py index 666e742d51..b6fcfca0a7 100644 --- a/backend/hct_mis_api/apps/registration_datahub/admin.py +++ b/backend/hct_mis_api/apps/registration_datahub/admin.py @@ -34,7 +34,7 @@ class RegistrationDataImportDatahubAdmin(ExtraUrlMixin, AdminAdvancedFiltersMixi list_filter = ( "created_at", "import_done", - TextFieldFilter.factory("business_area_slug__istartswith"), + ("business_area_slug__istartswith", TextFieldFilter.factory(title="Business area slug")), ) advanced_filter_fields = ( "created_at", @@ -162,8 +162,8 @@ class ImportedIndividualAdmin(ExtraUrlMixin, HOPEModelAdminBase): list_filter = ( ("deduplication_batch_results", ScoreFilter), ("deduplication_golden_record_results", ScoreFilter), - TextFieldFilter.factory("registration_data_import__name__istartswith"), - TextFieldFilter.factory("individual_id__istartswith"), + ("registration_data_import__name__istartswith", TextFieldFilter.factory(title="Registration data import")), + ("individual_id__istartswith", TextFieldFilter.factory(title="Individual Id")), "deduplication_batch_status", "deduplication_golden_record_status", ) @@ -223,8 +223,8 @@ class ImportedHouseholdAdmin(HOPEModelAdminBase): list_filter = ( ("country", ChoicesFieldComboFilter), ("country_origin", ChoicesFieldComboFilter), - TextFieldFilter.factory("registration_data_import__name__istartswith"), - TextFieldFilter.factory("kobo_submission_uuid__istartswith"), + ("registration_data_import__name__istartswith", TextFieldFilter.factory(title="Registration Data Import Name")), + ("kobo_submission_uuid__istartswith", TextFieldFilter.factory(title="Kobo Submission UUID")), ) diff --git a/backend/hct_mis_api/apps/targeting/admin.py b/backend/hct_mis_api/apps/targeting/admin.py index 766229dcf0..a5cf88e13f 100644 --- a/backend/hct_mis_api/apps/targeting/admin.py +++ b/backend/hct_mis_api/apps/targeting/admin.py @@ -84,9 +84,9 @@ class HouseholdSelectionAdmin(ExtraUrlMixin, HOPEModelAdminBase): "target_population", ) list_filter = ( - TextFieldFilter.factory("household__unicef_id", "Household ID"), + ("household__unicef_id", TextFieldFilter.factory(title="Household ID")), ("target_population", AutoCompleteFilter), - TextFieldFilter.factory("target_population__id", "Target Population ID"), + ("target_population__id", 
TextFieldFilter.factory(title="Target Population ID")), "final", ("vulnerability_score", MaxMinFilter), ) diff --git a/backend/hct_mis_api/settings/base.py b/backend/hct_mis_api/settings/base.py index 9d63d563ce..b7e170a22b 100644 --- a/backend/hct_mis_api/settings/base.py +++ b/backend/hct_mis_api/settings/base.py @@ -47,7 +47,7 @@ ROOT_URLCONF = "hct_mis_api.urls" DATA_VOLUME = env("DATA_VOLUME") - +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" ALLOWED_EXTENSIONS = ( "pdf", "doc", @@ -284,9 +284,7 @@ # LOGIN_REDIRECT_URL = f'/api/{ADMIN_PANEL_URL}/' AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" - }, + {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, { "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", "OPTIONS": {"min_length": 12}, @@ -319,9 +317,7 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): "version": 1, "disable_existing_loggers": False, "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s" - }, + "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s"}, "verbose": { "format": "[%(asctime)s][%(levelname)s][%(name)s] %(filename)s.%(funcName)s:%(lineno)d %(message)s", }, @@ -376,13 +372,9 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): REDIS_INSTANCE = os.getenv("REDIS_INSTANCE", "redis:6379") if "CACHE_URL" not in os.environ: if REDIS_INSTANCE: - os.environ[ - "CACHE_URL" - ] = f"redis://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" + os.environ["CACHE_URL"] = f"redis://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" else: - os.environ[ - "CACHE_URL" - ] = f"dummycache://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" + os.environ["CACHE_URL"] = f"dummycache://{REDIS_INSTANCE}/1?client_class=django_redis.client.DefaultClient" CACHES = { "default": env.cache(), @@ -701,9 +693,7 @@ def extend_list_avoid_repeats(list_to_extend, extend_with): SMART_ADMIN_BOOKMARKS_PERMISSION = None SMART_ADMIN_PROFILE_LINK = True -SMART_ADMIN_ISROOT = lambda r, *a: r.user.is_superuser and r.headers.get( - "x-root-token" -) == env("ROOT_TOKEN") +SMART_ADMIN_ISROOT = lambda r, *a: r.user.is_superuser and r.headers.get("x-root-token") == env("ROOT_TOKEN") EXCHANGE_RATE_CACHE_EXPIRY = 1 * 60 * 60 * 24 diff --git a/backend/poetry.lock b/backend/poetry.lock index 440b5fb285..c9ba5ec5c4 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,6 +1,6 @@ [[package]] name = "amqp" -version = "5.0.6" +version = "5.0.9" description = "Low-level AMQP client for Python (fork of amqplib)." 
category = "main" optional = false @@ -41,6 +41,31 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "asgiref" +version = "3.5.0" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] + +[[package]] +name = "asttokens" +version = "2.0.5" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + [[package]] name = "azure-common" version = "1.1.27" @@ -141,27 +166,27 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "celery" -version = "5.1.2" +version = "5.2.2" description = "Distributed Task Queue." category = "main" optional = false -python-versions = ">=3.6," +python-versions = ">=3.7," [package.dependencies] billiard = ">=3.6.4.0,<4.0" -click = ">=7.0,<8.0" +click = ">=8.0,<9.0" click-didyoumean = ">=0.0.3" click-plugins = ">=1.1.1" -click-repl = ">=0.1.6" -kombu = ">=5.1.0,<6.0" -pytz = ">0.0-dev" -redis = {version = ">=3.2.0", optional = true, markers = "extra == \"redis\""} +click-repl = ">=0.2.0" +kombu = ">=5.2.2,<6.0" +pytz = ">0.dev.0" +redis = {version = ">=3.4.1,<4.0.0", optional = true, markers = "extra == \"redis\""} vine = ">=5.0.0,<6.0" [package.extras] arangodb = ["pyArango (>=1.3.2)"] auth = ["cryptography"] -azureblockblob = ["azure-storage-blob (==12.6.0)"] +azureblockblob = ["azure-storage-blob (==12.9.0)"] brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] cassandra = ["cassandra-driver (<3.21.0)"] consul = ["python-consul2"] @@ -171,21 +196,21 @@ couchdb = ["pycouchdb"] django = ["Django (>=1.11)"] dynamodb = ["boto3 (>=1.9.178)"] elasticsearch = ["elasticsearch"] -eventlet = ["eventlet (>=0.26.1)"] -gevent = ["gevent (>=1.0.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] librabbitmq = ["librabbitmq (>=1.5.0)"] memcache = ["pylibmc"] -mongodb = ["pymongo[srv] (>=3.3.0)"] +mongodb = ["pymongo[srv] (>=3.3.0,<3.12.1)"] msgpack = ["msgpack"] pymemcache = ["python-memcached"] pyro = ["pyro4"] pytest = ["pytest-celery"] -redis = ["redis (>=3.2.0)"] +redis = ["redis (>=3.4.1,<4.0.0)"] s3 = ["boto3 (>=1.9.125)"] slmq = ["softlayer-messaging (>=1.0.3)"] solar = ["ephem"] sqlalchemy = ["sqlalchemy"] -sqs = ["boto3 (>=1.9.125)", "pycurl (==7.43.0.5)"] +sqs = ["kombu"] tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=1.3.1)"] @@ -212,7 +237,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.7" +version = "2.0.11" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -223,11 +248,14 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "7.1.2" +version = "8.0.3" description = "Composable command line interface toolkit" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "click-didyoumean" @@ -271,13 +299,13 @@ six = "*" name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." 
-category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "cryptography" -version = "35.0.0" +version = "36.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -288,7 +316,7 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] @@ -296,7 +324,7 @@ test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pr [[package]] name = "decorator" -version = "5.1.0" +version = "5.1.1" description = "Decorators for Humans" category = "dev" optional = false @@ -320,18 +348,19 @@ python-versions = ">=2.7" [[package]] name = "django" -version = "2.2.26" +version = "3.2" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] +asgiref = ">=3.3.2,<4" pytz = "*" sqlparse = ">=0.2.2" [package.extras] -argon2 = ["argon2-cffi (>=16.1.0)"] +argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] @@ -348,7 +377,7 @@ test = ["coverage", "factory-boy", "django-webtest", "pdbpp", "pyquery", "pytest [[package]] name = "django-adminactions" -version = "1.13.1" +version = "1.14.0" description = "Collections of useful actions to use with django.contrib.admin.ModelAdmin" category = "main" optional = false @@ -360,12 +389,12 @@ xlrd = ">=0.9.2" xlwt = "*" [package.extras] -dev = ["pdbpp", "virtualenv", "wheel", "check-manifest", "docutils", "check-manifest", "django-dynamic-fixture", "django-webtest (>1.9.6)", "django-admin-extra-urls", "flake8", "flake8-isort", "mock (>=1.0.1)", "modernize", "pillow", "pytest", "pytest-cache", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "readme", "selenium (>=2.42.0)", "setuptools (>=15.0)", "tox"] +dev = ["pdbpp", "virtualenv", "wheel", "check-manifest", "docutils", "django-dynamic-fixture", "django-webtest (>1.9.6)", "django-admin-extra-urls", "flake8", "flake8-isort", "mock (>=1.0.1)", "modernize", "pillow", "pytest", "pytest-cache", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "readme", "selenium (>=2.42.0)", "setuptools (>=15.0)", "tox"] test = ["pytz", "xlrd (>=0.9.2)", "xlwt", "check-manifest", "django-dynamic-fixture", "django-webtest (>1.9.6)", "django-admin-extra-urls", "flake8", "flake8-isort", "mock (>=1.0.1)", "modernize", "pillow", "pytest", "pytest-cache", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "readme", "selenium (>=2.42.0)", "setuptools (>=15.0)", "tox"] [[package]] name = "django-adminfilters" -version = "1.8.0" +version = "1.9.0" description = "Extra filters for django admin site" category = "main" optional = false @@ -377,18 +406,17 @@ test = ["check-manifest", "coverage", "pytest", "pytest-cov", "pytest-django", " [[package]] name = "django-advanced-filters" -version = "1.3.0" +version = "1.4.0" description = "A Django application for advanced admin filters" category = "main" optional = false python-versions = "*" 
[package.dependencies] -django-braces = ">=1.4.0,<=1.14.0" simplejson = ">=3.6.5,<4" [package.extras] -test = ["coveralls", "factory-boy (==2.12.0)", "pycodestyle (==2.5.0)", "pytest-django (==3.9.0)"] +test = ["coveralls", "factory-boy (==2.12.0)", "pycodestyle (==2.5.0)", "pytest-django (==3.9.0)", "pytest-cov"] [[package]] name = "django-appconf" @@ -416,19 +444,6 @@ python-dateutil = "2.6.0" [package.source] type = "url" url = "https://github.com/Tivix/django-auditlog/archive/refs/heads/master.zip" - -[[package]] -name = "django-braces" -version = "1.14.0" -description = "Reusable, generic mixins for Django" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Django = ">=1.11.0" -six = "*" - [[package]] name = "django-celery-beat" version = "2.2.1" @@ -456,16 +471,16 @@ celery = ">=5.0,<6.0" [[package]] name = "django-compressor" -version = "2.3" +version = "3.1" description = "Compresses linked and inline JavaScript or CSS into single cached files." category = "main" optional = false python-versions = "*" [package.dependencies] -django-appconf = ">=1.0" -rcssmin = "1.0.6" -rjsmin = "1.1.0" +django-appconf = ">=1.0.3" +rcssmin = "1.1.0" +rjsmin = "1.2.0" [[package]] name = "django-concurrency" @@ -528,7 +543,7 @@ six = "*" [[package]] name = "django-environ" -version = "0.8.0" +version = "0.8.1" description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." category = "main" optional = false @@ -552,18 +567,18 @@ six = ">=1.2" [[package]] name = "django-filter" -version = "2.2.0" +version = "2.4.0" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." category = "main" optional = false -python-versions = ">=3.4" +python-versions = ">=3.5" [package.dependencies] -Django = ">=1.11" +Django = ">=2.2" [[package]] name = "django-hijack" -version = "3.0.4" +version = "3.1.4" description = "django-hijack allows superusers to hijack (=login as) and work on behalf of another user." category = "main" optional = false @@ -574,7 +589,7 @@ django = ">=2.2" [[package]] name = "django-import-export" -version = "2.7.0" +version = "2.7.1" description = "Django application and library for importing and exporting data with included admin integration." category = "main" optional = false @@ -595,7 +610,7 @@ python-versions = "*" [[package]] name = "django-jsoneditor" -version = "0.1.6" +version = "0.2.2" description = "Django JSON Editor" category = "main" optional = false @@ -668,7 +683,7 @@ phonenumberslite = ["phonenumberslite (>=7.0.2)"] [[package]] name = "django-redis" -version = "5.0.0" +version = "5.2.0" description = "Full featured redis cache backend for Django." category = "main" optional = false @@ -676,7 +691,10 @@ python-versions = ">=3.6" [package.dependencies] Django = ">=2.2" -redis = ">=3.0.0" +redis = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1" + +[package.extras] +hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] [[package]] name = "django-smart-admin" @@ -742,7 +760,7 @@ pytz = "*" [[package]] name = "django-timezone-field" -version = "4.2.1" +version = "4.2.3" description = "A Django app providing database and form fields for pytz timezone objects." 
category = "main" optional = false @@ -757,7 +775,7 @@ rest_framework = ["djangorestframework (>=3.0.0)"] [[package]] name = "django-webtest" -version = "1.9.8" +version = "1.9.9" description = "Instant integration of Ian Bicking's WebTest (http://docs.pylonsproject.org/projects/webtest/) with Django's testing framework." category = "dev" optional = false @@ -808,6 +826,14 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "executing" +version = "0.8.2" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "factory-boy" version = "2.12.0" @@ -1004,15 +1030,16 @@ ipython = {version = ">=5.1.0", markers = "python_version >= \"3.4\""} [[package]] name = "ipython" -version = "7.28.0" +version = "8.0.1" description = "IPython: Productive Interactive Computing" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" +black = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" jedi = ">=0.16" @@ -1021,10 +1048,11 @@ pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" -traitlets = ">=4.2" +stack-data = "*" +traitlets = ">=5" [package.extras] -all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] +all = ["Sphinx (>=1.3)", "curio", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.19)", "pandas", "pygments", "pytest", "pytest-asyncio", "qtconsole", "testpath", "trio"] doc = ["Sphinx (>=1.3)"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] @@ -1032,7 +1060,8 @@ nbformat = ["nbformat"] notebook = ["notebook", "ipywidgets"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] +test = ["pytest", "pytest-asyncio", "testpath", "pygments"] +test_extra = ["pytest", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "pygments", "trio"] [[package]] name = "jdcal" @@ -1073,29 +1102,29 @@ i18n = ["Babel (>=0.8)"] [[package]] name = "kombu" -version = "5.1.0" +version = "5.2.3" description = "Messaging library for Python." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -amqp = ">=5.0.6,<6.0.0" +amqp = ">=5.0.9,<6.0.0" vine = "*" [package.extras] azureservicebus = ["azure-servicebus (>=7.0.0)"] azurestoragequeues = ["azure-storage-queue"] consul = ["python-consul (>=0.6.0)"] -librabbitmq = ["librabbitmq (>=1.5.2)"] -mongodb = ["pymongo (>=3.3.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=3.3.0,<3.12.1)"] msgpack = ["msgpack"] pyro = ["pyro4"] qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=3.3.11)"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] slmq = ["softlayer-messaging (>=1.0.3)"] sqlalchemy = ["sqlalchemy"] -sqs = ["boto3 (>=1.4.4)", "pycurl (==7.43.0.2)", "urllib3 (<1.26)"] +sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=1.3.1)"] @@ -1136,16 +1165,16 @@ python-versions = "*" [[package]] name = "oauthlib" -version = "3.1.1" +version = "3.2.0" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" category = "main" optional = false python-versions = ">=3.6" [package.extras] -rsa = ["cryptography (>=3.0.0,<4)"] +rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0,<4)", "pyjwt (>=2.0.0,<3)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "odfpy" @@ -1183,14 +1212,14 @@ Pillow = "*" [[package]] name = "packaging" -version = "21.0" +version = "21.3" description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "parameterized" @@ -1284,7 +1313,7 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.8.0" +version = "5.9.0" description = "Cross-platform lib for process and system monitoring in Python." category = "main" optional = false @@ -1309,6 +1338,17 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["pytest"] + [[package]] name = "pycountry" version = "20.7.3" @@ -1319,7 +1359,7 @@ python-versions = "*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "main" optional = false @@ -1348,15 +1388,18 @@ test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.7" description = "Python parsing module" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "python-crontab" -version = "2.5.1" +version = "2.6.0" description = "Python Crontab API" category = "main" optional = false @@ -1413,7 +1456,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "rcssmin" -version = "1.0.6" +version = "1.1.0" description = "CSS Minifier" category = "main" optional = false @@ -1432,7 +1475,7 @@ hiredis = ["hiredis (>=0.1.3)"] [[package]] name = "regex" -version = "2021.10.8" +version = "2022.1.18" description = "Alternative regular expression module, to replace re." 
category = "main" optional = false @@ -1440,7 +1483,7 @@ python-versions = "*" [[package]] name = "requests" -version = "2.26.0" +version = "2.27.1" description = "Python HTTP for Humans." category = "main" optional = false @@ -1474,7 +1517,7 @@ test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.1 [[package]] name = "requests-oauthlib" -version = "1.3.0" +version = "1.3.1" description = "OAuthlib authentication support for Requests." category = "main" optional = false @@ -1505,7 +1548,7 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake [[package]] name = "rjsmin" -version = "1.1.0" +version = "1.2.0" description = "Javascript Minifier" category = "main" optional = false @@ -1549,7 +1592,7 @@ tornado = ["tornado (>=5)"] [[package]] name = "simplejson" -version = "3.17.5" +version = "3.17.6" description = "Simple, fast, extensible JSON encoder/decoder for Python" category = "main" optional = false @@ -1606,7 +1649,7 @@ test = ["six", "pytest (>=3.1.0)", "pytest-cov", "nose", "django (>=1.10.6)"] [[package]] name = "social-auth-app-django" -version = "3.1.0" +version = "3.4.0" description = "Python Social Authentication, Django integration." category = "main" optional = false @@ -1614,11 +1657,11 @@ python-versions = "*" [package.dependencies] six = "*" -social-auth-core = ">=1.2.0" +social-auth-core = ">=3.3.0" [[package]] name = "social-auth-core" -version = "3.3.3" +version = "3.4.0" description = "Python social authentication made simple." category = "main" optional = false @@ -1652,7 +1695,7 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.2.1" +version = "2.3.1" description = "A modern CSS selector implementation for Beautiful Soup." category = "dev" optional = false @@ -1666,13 +1709,29 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "stack-data" +version = "0.1.4" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = "*" +executing = "*" +pure-eval = "*" + +[package.extras] +tests = ["pytest", "typeguard", "pygments", "littleutils"] + [[package]] name = "tablib" -version = "3.1.0" +version = "3.2.0" description = "Format agnostic tabular data library (XLS, JSON, YAML, CSV)" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] markuppy = {version = "*", optional = true, markers = "extra == \"html\""} @@ -1718,7 +1777,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "traitlets" -version = "5.1.0" +version = "5.1.1" description = "Traitlets Python configuration system" category = "dev" optional = false @@ -1729,19 +1788,19 @@ test = ["pytest"] [[package]] name = "typed-ast" -version = "1.4.3" +version = "1.5.2" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "unittest-xml-reporting" @@ -1850,7 +1909,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "xlsxwriter" -version = 
"3.0.1" +version = "3.0.2" description = "A Python module for creating Excel XLSX files." category = "main" optional = false @@ -1891,13 +1950,13 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" -python-versions = "3.9.1" -content-hash = "1d47822eca60f5f6142775df219ca56fd4b9d449c17a76a3c0d6b9576f9f6066" +python-versions = "3.9.10" +content-hash = "fc1a9a2077f38b07f9ed3f79712f23b2e13c462fa94e1a65be642286c7e7f421" [metadata.files] amqp = [ - {file = "amqp-5.0.6-py3-none-any.whl", hash = "sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb"}, - {file = "amqp-5.0.6.tar.gz", hash = "sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2"}, + {file = "amqp-5.0.9-py3-none-any.whl", hash = "sha256:9cd81f7b023fc04bbb108718fbac674f06901b77bfcdce85b10e2a5d0ee91be5"}, + {file = "amqp-5.0.9.tar.gz", hash = "sha256:1e5f707424e544078ca196e72ae6a14887ce74e02bd126be54b7c03c971bef18"}, ] aniso8601 = [ {file = "aniso8601-7.0.0-py2.py3-none-any.whl", hash = "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b"}, @@ -1915,6 +1974,14 @@ argh = [ {file = "argh-0.26.2-py2.py3-none-any.whl", hash = "sha256:a9b3aaa1904eeb78e32394cd46c6f37ac0fb4af6dc488daa58971bdc7d7fcaf3"}, {file = "argh-0.26.2.tar.gz", hash = "sha256:e9535b8c84dc9571a48999094fda7f33e63c3f1b74f3e5f3ac0105a58405bb65"}, ] +asgiref = [ + {file = "asgiref-3.5.0-py3-none-any.whl", hash = "sha256:88d59c13d634dcffe0510be048210188edd79aeccb6a6c9028cdad6f31d730a9"}, + {file = "asgiref-3.5.0.tar.gz", hash = "sha256:2f8abc20f7248433085eda803936d98992f1343ddb022065779f37c5da0181d0"}, +] +asttokens = [ + {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, + {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, +] azure-common = [ {file = "azure-common-1.1.27.zip", hash = "sha256:9f3f5d991023acbd93050cf53c4e863c6973ded7e236c69e99c8ff5c7bad41ef"}, {file = "azure_common-1.1.27-py2.py3-none-any.whl", hash = "sha256:426673962740dbe9aab052a4b52df39c07767decd3f25fdc87c9d4c566a04934"}, @@ -1947,8 +2014,8 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] celery = [ - {file = "celery-5.1.2-py3-none-any.whl", hash = "sha256:9dab2170b4038f7bf10ef2861dbf486ddf1d20592290a1040f7b7a1259705d42"}, - {file = "celery-5.1.2.tar.gz", hash = "sha256:8d9a3de9162965e97f8e8cc584c67aad83b3f7a267584fa47701ed11c3e0d4b0"}, + {file = "celery-5.2.2-py3-none-any.whl", hash = "sha256:5a68a351076cfac4f678fa5ffd898105c28825a2224902da006970005196d061"}, + {file = "celery-5.2.2.tar.gz", hash = "sha256:2844eb040e915398623a43253a8e1016723442ece6b0751a3c416d8a2b34216f"}, ] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, @@ -2007,12 +2074,12 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, - {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, + {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, + {file = 
"charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, ] click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, ] click-didyoumean = [ {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, @@ -2031,30 +2098,30 @@ colorama = [ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] cryptography = [ - {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"}, - {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"}, - {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"}, - {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"}, - {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"}, - {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"}, - 
{file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"}, - {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"}, + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, + {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, + {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = 
"sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, + {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, ] decorator = [ - {file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"}, - {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"}, + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] defusedxml = [ {file = "defusedxml-0.7.0rc1-py2.py3-none-any.whl", hash = "sha256:8ede8ba04cf5bf7999e1492fa77df545db83717f52c5eab625f97228ebd539bf"}, @@ -2065,31 +2132,28 @@ diff-match-patch = [ {file = "diff_match_patch-20200713-py3-none-any.whl", hash = "sha256:8bf9d9c4e059d917b5c6312bac0c137971a32815ddbda9c682b949f2986b4d34"}, ] django = [ - {file = "Django-2.2.26-py3-none-any.whl", hash = "sha256:85e62019366692f1d5afed946ca32fef34c8693edf342ac9d067d75d64faf0ac"}, - {file = "Django-2.2.26.tar.gz", hash = "sha256:dfa537267d52c6243a62b32855a744ca83c37c70600aacffbfd98bc5d6d8518f"}, + {file = "Django-3.2-py3-none-any.whl", hash = "sha256:0604e84c4fb698a5e53e5857b5aea945b2f19a18f25f10b8748dbdf935788927"}, + {file = "Django-3.2.tar.gz", hash = "sha256:21f0f9643722675976004eb683c55d33c05486f94506672df3d6a141546f389d"}, ] django-admin-extra-urls = [ {file = "django-admin-extra-urls-3.5.1.tar.gz", hash = "sha256:fc68efd40569f2301cb329a0c445dd3517b0aa9c03c1ab7d9fc8a006d22de9b8"}, ] django-adminactions = [ - {file = "django-adminactions-1.13.1.tar.gz", hash = "sha256:264101a3b5118ec0edaf6be188cd869a011d4f719d85f5bd6dc206b891fe7aac"}, + {file = "django-adminactions-1.14.0.tar.gz", hash = "sha256:78d77cb795070726ec1aa157dad67cf2f78f920f0f6a99fccb2ec61de3142501"}, + {file = "django_adminactions-1.14.0-py2.py3-none-any.whl", hash = "sha256:0ba5fdf9d113552b7058716df3c82125975d821155d6ad9b5e4d69b660432243"}, ] django-adminfilters = [ - {file = "django-adminfilters-1.8.0.tar.gz", hash = "sha256:c33972a75074637b554a98685298110b90b62b6a617eef46df11e537d0d431d9"}, + {file = "django-adminfilters-1.9.0.tar.gz", hash = "sha256:73c876b6039053ed39572b977d113ad220ba6b73a24330d08326927d6b013cdb"}, ] django-advanced-filters = [ - {file = "django-advanced-filters-1.3.0.tar.gz", hash = "sha256:fd5edd9dd984b77f458277058c7ece7b547a53d567db75fcdc26629abd769d6a"}, - {file = "django_advanced_filters-1.3.0-py3-none-any.whl", hash = "sha256:d2fb9743002c831138ad3c041cd43d0bc5e6e4a8bc97abee1d1c4d7e2ff513a8"}, + {file = "django-advanced-filters-1.4.0.tar.gz", hash = "sha256:3877ffa956b8f1d3918556f00285750478c67312849fee5d882060fd7cad9d99"}, + {file = "django_advanced_filters-1.4.0-py3-none-any.whl", hash = "sha256:40f4fb677c4bae7393ca7bf4c843b7ef76263469a234dcd0a55b6fdbd102ff1b"}, ] django-appconf = [ {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, ] django-auditlog = [] -django-braces = [ - {file = "django-braces-1.14.0.tar.gz", hash = 
"sha256:83705b78948de00804bfacf40c315d001bb39630f35bbdd8588211c2d5b4d43f"}, - {file = "django_braces-1.14.0-py2.py3-none-any.whl", hash = "sha256:a6d9b34cf3e4949635e54884097c30410d7964fc7bec7231445ea7079b8c5722"}, -] django-celery-beat = [ {file = "django-celery-beat-2.2.1.tar.gz", hash = "sha256:97ae5eb309541551bdb07bf60cc57cadacf42a74287560ced2d2c06298620234"}, {file = "django_celery_beat-2.2.1-py2.py3-none-any.whl", hash = "sha256:ab43049634fd18dc037927d7c2c7d5f67f95283a20ebbda55f42f8606412e66c"}, @@ -2099,8 +2163,8 @@ django-celery-results = [ {file = "django_celery_results-2.2.0.tar.gz", hash = "sha256:cc0285090a306f97f1d4b7929ed98af0475bf6db2568976b3387de4fbe812edc"}, ] django-compressor = [ - {file = "django_compressor-2.3-py2.py3-none-any.whl", hash = "sha256:da9ee5ce4fc8b9211dcecd2229520514a4ba9ac3bcdc59b48092ec4d7f6b96b0"}, - {file = "django_compressor-2.3.tar.gz", hash = "sha256:47c86347f75c64954a06afbbfc820a750619e10c23a49272b865020a407b7edd"}, + {file = "django_compressor-3.1-py2.py3-none-any.whl", hash = "sha256:89f7ba86777b30672c2f9c7557bf2aff87c5890903c73b1fa3ae38acd143e855"}, + {file = "django_compressor-3.1.tar.gz", hash = "sha256:c4a87bf65f9a534cfaf1c321a000a229c24e50c6d62ba6ab089482db42e819d9"}, ] django-concurrency = [ {file = "django-concurrency-2.3.tar.gz", hash = "sha256:36c5ab4836de6e255285661b3b057a36c58c8c69c81287f4c1b7e7be7c733503"}, @@ -2122,32 +2186,32 @@ django-elasticsearch-dsl = [ {file = "django_elasticsearch_dsl-7.1.4-py2.py3-none-any.whl", hash = "sha256:e733ccfd0e8b83ad6896d812a2c15ccbd563caf4ebf30fd31640538ad2de8e26"}, ] django-environ = [ - {file = "django-environ-0.8.0.tar.gz", hash = "sha256:f77f8890d4cdaf53c3f233bc4367c219d3e8f15073959f8decffc72fd64321c2"}, - {file = "django_environ-0.8.0-py2.py3-none-any.whl", hash = "sha256:e52b3cd0fde08afe6a3367d125cda43a0bf2a0166e2797a31df1a826521dde34"}, + {file = "django-environ-0.8.1.tar.gz", hash = "sha256:6f0bc902b43891656b20486938cba0861dc62892784a44919170719572a534cb"}, + {file = "django_environ-0.8.1-py2.py3-none-any.whl", hash = "sha256:42593bee519a527602a467c7b682aee1a051c2597f98c45f4f4f44169ecdb6e5"}, ] django-extensions = [ {file = "django-extensions-2.2.6.tar.gz", hash = "sha256:936e8e3962024d3c75ea54f4e0248002404ca7ca7fb698430e60b06b5555b4e7"}, {file = "django_extensions-2.2.6-py2.py3-none-any.whl", hash = "sha256:4524eca892d23fa6e93b0620901983b287ff5dc806f1b978d6a98541f06b9471"}, ] django-filter = [ - {file = "django-filter-2.2.0.tar.gz", hash = "sha256:c3deb57f0dd7ff94d7dce52a047516822013e2b441bed472b722a317658cfd14"}, - {file = "django_filter-2.2.0-py3-none-any.whl", hash = "sha256:558c727bce3ffa89c4a7a0b13bc8976745d63e5fd576b3a9a851650ef11c401b"}, + {file = "django-filter-2.4.0.tar.gz", hash = "sha256:84e9d5bb93f237e451db814ed422a3a625751cbc9968b484ecc74964a8696b06"}, + {file = "django_filter-2.4.0-py3-none-any.whl", hash = "sha256:e00d32cebdb3d54273c48f4f878f898dced8d5dfaad009438fe61ebdf535ace1"}, ] django-hijack = [ - {file = "django-hijack-3.0.4.tar.gz", hash = "sha256:ef391c5b497b00ce2e054063240c941e6763af5b730a90462b3fd082fb25927d"}, - {file = "django_hijack-3.0.4-py3-none-any.whl", hash = "sha256:c53dc90880826463a55c961dc766ec4ad56d08891905a03e70df60403c0c11e4"}, + {file = "django-hijack-3.1.4.tar.gz", hash = "sha256:785940c2e693401d8302fff4ced2d8cf0beb69a88b7f944539b035ab11b1b6d3"}, + {file = "django_hijack-3.1.4-py3-none-any.whl", hash = "sha256:93944e6b940d66642fd154e77d960c0f1022eae3e6ca29f0ebce48790c3e62d0"}, ] django-import-export = [ - {file = "django-import-export-2.7.0.tar.gz", 
hash = "sha256:a063143274efe0897938f51961a228f47bd11f49c962b880865001bd9a5e8255"}, - {file = "django_import_export-2.7.0-py3-none-any.whl", hash = "sha256:ade78b6631b3ddd88bb3423dbdd866fabe689b7019668db753da183f8a9bdd2d"}, + {file = "django-import-export-2.7.1.tar.gz", hash = "sha256:4bc65943a5ce66aeaf2b5d0d6f75b1863b63b10323bf235e46500bef5d6dd85b"}, + {file = "django_import_export-2.7.1-py3-none-any.whl", hash = "sha256:254ca359782efca932c398edabc15dd51d31da241e85cc03af5b720173e0b2fe"}, ] django-js-asset = [ {file = "django-js-asset-1.2.2.tar.gz", hash = "sha256:c163ae80d2e0b22d8fb598047cd0dcef31f81830e127cfecae278ad574167260"}, {file = "django_js_asset-1.2.2-py2.py3-none-any.whl", hash = "sha256:8ec12017f26eec524cab436c64ae73033368a372970af4cf42d9354fcb166bdd"}, ] django-jsoneditor = [ - {file = "django-jsoneditor-0.1.6.tar.gz", hash = "sha256:574f33a60271f464cad5739b6f4718aca81a779188297faa5adab41754cac9ba"}, - {file = "django_jsoneditor-0.1.6-py2.py3-none-any.whl", hash = "sha256:e74760740ef5ecb0d295bb07c21635d2bebea7b9bab4352fa403720cc275a905"}, + {file = "django-jsoneditor-0.2.2.tar.gz", hash = "sha256:c4d9de033840e0b736ac1c4fcc56298a4a0470402db8aaf35d30567127d8397c"}, + {file = "django_jsoneditor-0.2.2-py2.py3-none-any.whl", hash = "sha256:334c9a48791c51b76d737f1aa0b047d4f9f85356acb2c15c1a5b5639202cadb6"}, ] django-jsonfield = [ {file = "django-jsonfield-1.4.1.tar.gz", hash = "sha256:f789a0ea1f80b48aff7d6c36dd356ce125dbf1b7cd97a82d315607ac758f50ff"}, @@ -2170,8 +2234,8 @@ django-phonenumber-field = [ {file = "django_phonenumber_field-4.0.0-py3-none-any.whl", hash = "sha256:2ca3bb0ada0ebc164bd903a981a34f1202a4294006e520b0da961bd7ce9f20a4"}, ] django-redis = [ - {file = "django-redis-5.0.0.tar.gz", hash = "sha256:048f665bbe27f8ff2edebae6aa9c534ab137f1e8fa7234147ef470df3f3aa9b8"}, - {file = "django_redis-5.0.0-py3-none-any.whl", hash = "sha256:97739ca9de3f964c51412d1d7d8aecdfd86737bb197fce6e1ff12620c63c97ee"}, + {file = "django-redis-5.2.0.tar.gz", hash = "sha256:8a99e5582c79f894168f5865c52bd921213253b7fd64d16733ae4591564465de"}, + {file = "django_redis-5.2.0-py3-none-any.whl", hash = "sha256:1d037dc02b11ad7aa11f655d26dac3fb1af32630f61ef4428860a2e29ff92026"}, ] django-smart-admin = [ {file = "django-smart-admin-1.6.0.tar.gz", hash = "sha256:cbd9b0543453cdfac9e3da8fa5cdd9d4ddf9ac4db966d814f9dac04bd4ce1847"}, @@ -2188,12 +2252,12 @@ django-sysinfo = [ {file = "django-sysinfo-2.6.0.tar.gz", hash = "sha256:396f0c69f3b40daaac1f8d3dd374714f985576e907e828c4f1087ad255980433"}, ] django-timezone-field = [ - {file = "django-timezone-field-4.2.1.tar.gz", hash = "sha256:97780cde658daa5094ae515bb55ca97c1352928ab554041207ad515dee3fe971"}, - {file = "django_timezone_field-4.2.1-py3-none-any.whl", hash = "sha256:6dc782e31036a58da35b553bd00c70f112d794700025270d8a6a4c1d2e5b26c6"}, + {file = "django-timezone-field-4.2.3.tar.gz", hash = "sha256:5dd5bd9249382bef8847d3e7e4c32b7be182a4b538f354130d1252ed228892f8"}, + {file = "django_timezone_field-4.2.3-py3-none-any.whl", hash = "sha256:7552d2b0f145684b7de3fb5046101c7efd600cc6ba951b15c630fa1e1b83558e"}, ] django-webtest = [ - {file = "django-webtest-1.9.8.tar.gz", hash = "sha256:4ee691a90f5f14ba1902f03564aecce62276aab6a1559955bb9c02dea33fe6c0"}, - {file = "django_webtest-1.9.8-py2.py3-none-any.whl", hash = "sha256:5f304e5ffc897242c78e2783080ec49661bccfe1d030c31bc9c00913d5ebae72"}, + {file = "django-webtest-1.9.9.tar.gz", hash = "sha256:7b6eab091ba4cb1d6c0aa1059247f181255ffce99e7b976ec391fea04a215c94"}, + {file = "django_webtest-1.9.9-py3-none-any.whl", hash = 
"sha256:32daef3db0e851d832fe06dbbde70f3a878201086266281fad338eea50f5efc3"}, ] elasticsearch = [ {file = "elasticsearch-7.13.4-py2.py3-none-any.whl", hash = "sha256:5920df0ab2630778680376d86bea349dc99860977eec9b6d2bd0860f337313f2"}, @@ -2207,6 +2271,10 @@ et-xmlfile = [ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] +executing = [ + {file = "executing-0.8.2-py2.py3-none-any.whl", hash = "sha256:32fc6077b103bd19e6494a72682d66d5763cf20a106d5aa7c5ccbea4e47b0df7"}, + {file = "executing-0.8.2.tar.gz", hash = "sha256:c23bf42e9a7b9b212f185b1b2c3c91feb895963378887bb10e64a2e612ec0023"}, +] factory-boy = [ {file = "factory_boy-2.12.0-py2.py3-none-any.whl", hash = "sha256:728df59b372c9588b83153facf26d3d28947fc750e8e3c95cefa9bed0e6394ee"}, {file = "factory_boy-2.12.0.tar.gz", hash = "sha256:faf48d608a1735f0d0a3c9cbf536d64f9132b547dae7ba452c4d99a79e84a370"}, @@ -2300,8 +2368,8 @@ ipdb = [ {file = "ipdb-0.13.4.tar.gz", hash = "sha256:c85398b5fb82f82399fc38c44fe3532c0dde1754abee727d8f5cfcc74547b334"}, ] ipython = [ - {file = "ipython-7.28.0-py3-none-any.whl", hash = "sha256:f16148f9163e1e526f1008d7c8d966d9c15600ca20d1a754287cf96d00ba6f1d"}, - {file = "ipython-7.28.0.tar.gz", hash = "sha256:2097be5c814d1b974aea57673176a924c4c8c9583890e7a5f082f547b9975b11"}, + {file = "ipython-8.0.1-py3-none-any.whl", hash = "sha256:c503a0dd6ccac9c8c260b211f2dd4479c042b49636b097cc9a0d55fe62dff64c"}, + {file = "ipython-8.0.1.tar.gz", hash = "sha256:ab564d4521ea8ceaac26c3a2c6e5ddbca15c8848fd5a5cc325f960da88d42974"}, ] jdcal = [ {file = "jdcal-1.4.1-py2.py3-none-any.whl", hash = "sha256:1abf1305fce18b4e8aa248cf8fe0c56ce2032392bc64bbd61b5dff2a19ec8bba"}, @@ -2316,19 +2384,35 @@ jinja2 = [ {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, ] kombu = [ - {file = "kombu-5.1.0-py3-none-any.whl", hash = "sha256:e2dedd8a86c9077c350555153825a31e456a0dc20c15d5751f00137ec9c75f0a"}, - {file = "kombu-5.1.0.tar.gz", hash = "sha256:01481d99f4606f6939cdc9b637264ed353ee9e3e4f62cfb582324142c41a572d"}, + {file = "kombu-5.2.3-py3-none-any.whl", hash = "sha256:eeaeb8024f3a5cfc71c9250e45cddb8493f269d74ada2f74909a93c59c4b4179"}, + {file = "kombu-5.2.3.tar.gz", hash = "sha256:81a90c1de97e08d3db37dbf163eaaf667445e1068c98bfd89f051a40e9f6dbbd"}, ] markuppy = [ {file = "MarkupPy-1.14.tar.gz", hash = "sha256:1adee2c0a542af378fe84548ff6f6b0168f3cb7f426b46961038a2bcfaad0d5f"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2337,14 +2421,27 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2354,6 +2451,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2367,8 +2470,8 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] oauthlib = [ - {file = "oauthlib-3.1.1-py2.py3-none-any.whl", hash = "sha256:42bf6354c2ed8c6acb54d971fce6f88193d97297e18602a3a886603f9d7730cc"}, - {file = "oauthlib-3.1.1.tar.gz", hash = "sha256:8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3"}, + {file = "oauthlib-3.2.0-py3-none-any.whl", hash = "sha256:6db33440354787f9b7f3a6dbd4febf5d0f93758354060e802f6c06cb493022fe"}, + {file = "oauthlib-3.2.0.tar.gz", hash = "sha256:23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2"}, ] odfpy = [ {file = "odfpy-1.4.1-py2.7.egg", hash = "sha256:fc3b8d1bc098eba4a0fda865a76d9d1e577c4ceec771426bcb169a82c5e9dfe0"}, @@ -2382,8 +2485,8 @@ openpyxl-image-loader = [ {file = "openpyxl_image_loader-1.0.5-py3-none-any.whl", hash = "sha256:103e6ad4dc433cc3bb18d1f6c413139ad4a91a5f3de0e39783534e22ff0c9c0b"}, ] packaging = [ - {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, - {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = 
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] parameterized = [ {file = "parameterized-0.7.4-py2.py3-none-any.whl", hash = "sha256:59ab908e31c01505a987a2be78854e19cb1630c047bbab7848169c371d614d56"}, @@ -2441,34 +2544,38 @@ prompt-toolkit = [ {file = "prompt_toolkit-3.0.8.tar.gz", hash = "sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c"}, ] psutil = [ - {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, - {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, - {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, - {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, - {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, - {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, - {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, - {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, - {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, - {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, - {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, - {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, - {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, - {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, - {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, - {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, - {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, - {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, - {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, - {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, - {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, - {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, - {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, - {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, - {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, - {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, - {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, - {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, + {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, + {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, + {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, + {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, + {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, + {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, + {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, + {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, + {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, + {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, + {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, + {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, + {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, + {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, + {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, + {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, + {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, + {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, ] psycopg2 = [ {file = "psycopg2-2.8.4-cp27-cp27m-win32.whl", hash = "sha256:72772181d9bad1fa349792a1e7384dde56742c14af2b9986013eb94a240f005b"}, @@ -2489,12 +2596,16 @@ ptyprocess = [ {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"}, {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"}, ] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] pycountry = [ {file = "pycountry-20.7.3.tar.gz", hash = "sha256:81084a53d3454344c0292deebc20fcd0a1488c136d4900312cbd465cf552cb42"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = 
"pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pygments = [ {file = "Pygments-2.7.3-py3-none-any.whl", hash = "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"}, @@ -2505,11 +2616,11 @@ pyjwt = [ {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, ] python-crontab = [ - {file = "python-crontab-2.5.1.tar.gz", hash = "sha256:4bbe7e720753a132ca4ca9d4094915f40e9d9dc8a807a4564007651018ce8c31"}, + {file = "python-crontab-2.6.0.tar.gz", hash = "sha256:1e35ed7a3cdc3100545b43e196d34754e6551e7f95e4caebbe0e1c0ca41c2f1b"}, ] python-dateutil = [ {file = "python-dateutil-2.6.0.tar.gz", hash = "sha256:62a2f8df3d66f878373fd0072eacf4ee52194ba302e00082828e0d263b0418d2"}, @@ -2556,92 +2667,144 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] rcssmin = [ - {file = "rcssmin-1.0.6.tar.gz", hash = "sha256:ca87b695d3d7864157773a61263e5abb96006e9ff0e021eff90cbe0e1ba18270"}, + {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2211a5c91ea14a5937b57904c9121f8bfef20987825e55368143da7d25446e3b"}, + {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7085d1b51dd2556f3aae03947380f6e9e1da29fb1eeadfa6766b7f105c54c9ff"}, + {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:1512223b6a687bb747e4e531187bd49a56ed71287e7ead9529cbaa1ca4718a0a"}, + {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:6158d0d86cd611c5304d738dc3d6cfeb23864dd78ad0d83a633f443696ac5d77"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:0a6aae7e119509445bf7aa6da6ca0f285cc198273c20f470ad999ff83bbadcf9"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:506e33ab4c47051f7deae35b6d8dbb4a5c025f016e90a830929a1ecc7daa1682"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:352dd3a78eb914bb1cb269ac2b66b3154f2490a52ab605558c681de3fb5194d2"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:30f5522285065cae0164d20068377d84b5d10b414156115f8729b034d0ea5e8b"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:49807735f26f59404194f1e6f93254b6d5b6f7748c2a954f4470a86a40ff4c13"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f1a37bbd36b050813673e62ae6464467548628690bf4d48a938170e121e8616e"}, + {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ddff3a41611664c7f1d9e3d8a9c1669e0e155ac0458e586ffa834dc5953e7d9f"}, + {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8b659a88850e772c84cfac4520ec223de6807875e173d8ef3248ab7f90876066"}, + {file = "rcssmin-1.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1d7c2719d014e4e4df4e33b75ae8067c7e246cf470eaec8585e06e2efac7586c"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:37f1242e34ca273ed2c26cf778854e18dd11b31c6bfca60e23fce146c84667c1"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f31c82d06ba2dbf33c20db9550157e80bb0c4cbd24575c098f0831d1d2e3c5df"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7da63fee37edf204bbd86785edb4d7491642adbfd1d36fd230b7ccbbd8db1a6f"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c28b9eb20982b45ebe6adef8bd2547e5ed314dafddfff4eba806b0f8c166cfd1"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:32ccaebbbd4d56eab08cf26aed36f5d33389b9d1d3ca1fecf53eb6ab77760ddf"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7c44002b79f3656348196005b9522ec5e04f182b466f66d72b16be0bd03c13d8"}, + {file = "rcssmin-1.1.0.tar.gz", hash = "sha256:27fc400627fd3d328b7fe95af2a01f5d0af6b5af39731af5d071826a1f08e362"}, ] redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, ] regex = [ - {file = "regex-2021.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:094a905e87a4171508c2a0e10217795f83c636ccc05ddf86e7272c26e14056ae"}, - {file = "regex-2021.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981c786293a3115bc14c103086ae54e5ee50ca57f4c02ce7cf1b60318d1e8072"}, - {file = "regex-2021.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b0f2f874c6a157c91708ac352470cb3bef8e8814f5325e3c5c7a0533064c6a24"}, - {file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51feefd58ac38eb91a21921b047da8644155e5678e9066af7bcb30ee0dca7361"}, - {file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8de658d7db5987b11097445f2b1f134400e2232cb40e614e5f7b6f5428710e"}, - {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1ce02f420a7ec3b2480fe6746d756530f69769292eca363218c2291d0b116a01"}, - {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39079ebf54156be6e6902f5c70c078f453350616cfe7bfd2dd15bdb3eac20ccc"}, - {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ff24897f6b2001c38a805d53b6ae72267025878d35ea225aa24675fbff2dba7f"}, - {file = "regex-2021.10.8-cp310-cp310-win32.whl", hash = "sha256:c6569ba7b948c3d61d27f04e2b08ebee24fec9ff8e9ea154d8d1e975b175bfa7"}, - {file = "regex-2021.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:45cb0f7ff782ef51bc79e227a87e4e8f24bc68192f8de4f18aae60b1d60bc152"}, - {file = "regex-2021.10.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fab3ab8aedfb443abb36729410403f0fe7f60ad860c19a979d47fb3eb98ef820"}, - {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e55f8d66f1b41d44bc44c891bcf2c7fad252f8f323ee86fba99d71fd1ad5e3"}, - {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d52c5e089edbdb6083391faffbe70329b804652a53c2fdca3533e99ab0580d9"}, - {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1abbd95cbe9e2467cac65c77b6abd9223df717c7ae91a628502de67c73bf6838"}, - {file = 
"regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b5c215f3870aa9b011c00daeb7be7e1ae4ecd628e9beb6d7e6107e07d81287"}, - {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f540f153c4f5617bc4ba6433534f8916d96366a08797cbbe4132c37b70403e92"}, - {file = "regex-2021.10.8-cp36-cp36m-win32.whl", hash = "sha256:1f51926db492440e66c89cd2be042f2396cf91e5b05383acd7372b8cb7da373f"}, - {file = "regex-2021.10.8-cp36-cp36m-win_amd64.whl", hash = "sha256:5f55c4804797ef7381518e683249310f7f9646da271b71cb6b3552416c7894ee"}, - {file = "regex-2021.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb2baff66b7d2267e07ef71e17d01283b55b3cc51a81b54cc385e721ae172ba4"}, - {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e527ab1c4c7cf2643d93406c04e1d289a9d12966529381ce8163c4d2abe4faf"}, - {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c98b013273e9da5790ff6002ab326e3f81072b4616fd95f06c8fa733d2745f"}, - {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55ef044899706c10bc0aa052f2fc2e58551e2510694d6aae13f37c50f3f6ff61"}, - {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0ab3530a279a3b7f50f852f1bab41bc304f098350b03e30a3876b7dd89840e"}, - {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a37305eb3199d8f0d8125ec2fb143ba94ff6d6d92554c4b8d4a8435795a6eccd"}, - {file = "regex-2021.10.8-cp37-cp37m-win32.whl", hash = "sha256:2efd47704bbb016136fe34dfb74c805b1ef5c7313aef3ce6dcb5ff844299f432"}, - {file = "regex-2021.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:924079d5590979c0e961681507eb1773a142553564ccae18d36f1de7324e71ca"}, - {file = "regex-2021.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19b8f6d23b2dc93e8e1e7e288d3010e58fafed323474cf7f27ab9451635136d9"}, - {file = "regex-2021.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b09d3904bf312d11308d9a2867427479d277365b1617e48ad09696fa7dfcdf59"}, - {file = "regex-2021.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:951be934dc25d8779d92b530e922de44dda3c82a509cdb5d619f3a0b1491fafa"}, - {file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f125fce0a0ae4fd5c3388d369d7a7d78f185f904c90dd235f7ecf8fe13fa741"}, - {file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f199419a81c1016e0560c39773c12f0bd924c37715bffc64b97140d2c314354"}, - {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:09e1031e2059abd91177c302da392a7b6859ceda038be9e015b522a182c89e4f"}, - {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c070d5895ac6aeb665bd3cd79f673775caf8d33a0b569e98ac434617ecea57d"}, - {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:176796cb7f82a7098b0c436d6daac82f57b9101bb17b8e8119c36eecf06a60a3"}, - {file = "regex-2021.10.8-cp38-cp38-win32.whl", hash = 
"sha256:5e5796d2f36d3c48875514c5cd9e4325a1ca172fc6c78b469faa8ddd3d770593"}, - {file = "regex-2021.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:e4204708fa116dd03436a337e8e84261bc8051d058221ec63535c9403a1582a1"}, - {file = "regex-2021.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6dcf53d35850ce938b4f044a43b33015ebde292840cef3af2c8eb4c860730fff"}, - {file = "regex-2021.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b6ee6555b6fbae578f1468b3f685cdfe7940a65675611365a7ea1f8d724991"}, - {file = "regex-2021.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2ec1c106d3f754444abf63b31e5c4f9b5d272272a491fa4320475aba9e8157c"}, - {file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973499dac63625a5ef9dfa4c791aa33a502ddb7615d992bdc89cf2cc2285daa3"}, - {file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88dc3c1acd3f0ecfde5f95c32fcb9beda709dbdf5012acdcf66acbc4794468eb"}, - {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4786dae85c1f0624ac77cb3813ed99267c9adb72e59fdc7297e1cf4d6036d493"}, - {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6ce4f3d3c48f9f402da1ceb571548133d3322003ce01b20d960a82251695d2"}, - {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e3e2cea8f1993f476a6833ef157f5d9e8c75a59a8d8b0395a9a6887a097243b"}, - {file = "regex-2021.10.8-cp39-cp39-win32.whl", hash = "sha256:82cfb97a36b1a53de32b642482c6c46b6ce80803854445e19bc49993655ebf3b"}, - {file = "regex-2021.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:b04e512eb628ea82ed86eb31c0f7fc6842b46bf2601b66b1356a7008327f7700"}, - {file = "regex-2021.10.8.tar.gz", hash = "sha256:26895d7c9bbda5c52b3635ce5991caa90fbb1ddfac9c9ff1c7ce505e2282fb2a"}, + {file = "regex-2022.1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:34316bf693b1d2d29c087ee7e4bb10cdfa39da5f9c50fa15b07489b4ab93a1b5"}, + {file = "regex-2022.1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a0b9f6a1a15d494b35f25ed07abda03209fa76c33564c09c9e81d34f4b919d7"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f99112aed4fb7cee00c7f77e8b964a9b10f69488cdff626ffd797d02e2e4484f"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a2bf98ac92f58777c0fafc772bf0493e67fcf677302e0c0a630ee517a43b949"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8618d9213a863c468a865e9d2ec50221015f7abf52221bc927152ef26c484b4c"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b52cc45e71657bc4743a5606d9023459de929b2a198d545868e11898ba1c3f59"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e12949e5071c20ec49ef00c75121ed2b076972132fc1913ddf5f76cae8d10b4"}, + {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b02e3e72665cd02afafb933453b0c9f6c59ff6e3708bd28d0d8580450e7e88af"}, + {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:abfcb0ef78df0ee9df4ea81f03beea41849340ce33a4c4bd4dbb99e23ec781b6"}, + {file = 
"regex-2022.1.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6213713ac743b190ecbf3f316d6e41d099e774812d470422b3a0f137ea635832"}, + {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:61ebbcd208d78658b09e19c78920f1ad38936a0aa0f9c459c46c197d11c580a0"}, + {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b013f759cd69cb0a62de954d6d2096d648bc210034b79b1881406b07ed0a83f9"}, + {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9187500d83fd0cef4669385cbb0961e227a41c0c9bc39219044e35810793edf7"}, + {file = "regex-2022.1.18-cp310-cp310-win32.whl", hash = "sha256:94c623c331a48a5ccc7d25271399aff29729fa202c737ae3b4b28b89d2b0976d"}, + {file = "regex-2022.1.18-cp310-cp310-win_amd64.whl", hash = "sha256:1a171eaac36a08964d023eeff740b18a415f79aeb212169080c170ec42dd5184"}, + {file = "regex-2022.1.18-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:49810f907dfe6de8da5da7d2b238d343e6add62f01a15d03e2195afc180059ed"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2f5c3f7057530afd7b739ed42eb04f1011203bc5e4663e1e1d01bb50f813e3"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85ffd6b1cb0dfb037ede50ff3bef80d9bf7fa60515d192403af6745524524f3b"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba37f11e1d020969e8a779c06b4af866ffb6b854d7229db63c5fdddfceaa917f"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e27ea1ebe4a561db75a880ac659ff439dec7f55588212e71700bb1ddd5af9"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37978254d9d00cda01acc1997513f786b6b971e57b778fbe7c20e30ae81a97f3"}, + {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54a1eb9fd38f2779e973d2f8958fd575b532fe26013405d1afb9ee2374e7ab8"}, + {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:768632fd8172ae03852e3245f11c8a425d95f65ff444ce46b3e673ae5b057b74"}, + {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:de2923886b5d3214be951bc2ce3f6b8ac0d6dfd4a0d0e2a4d2e5523d8046fdfb"}, + {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1333b3ce73269f986b1fa4d5d395643810074dc2de5b9d262eb258daf37dc98f"}, + {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:d19a34f8a3429bd536996ad53597b805c10352a8561d8382e05830df389d2b43"}, + {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d2f355a951f60f0843f2368b39970e4667517e54e86b1508e76f92b44811a8a"}, + {file = "regex-2022.1.18-cp36-cp36m-win32.whl", hash = "sha256:2245441445099411b528379dee83e56eadf449db924648e5feb9b747473f42e3"}, + {file = "regex-2022.1.18-cp36-cp36m-win_amd64.whl", hash = "sha256:25716aa70a0d153cd844fe861d4f3315a6ccafce22b39d8aadbf7fcadff2b633"}, + {file = "regex-2022.1.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e070d3aef50ac3856f2ef5ec7214798453da878bb5e5a16c16a61edf1817cc3"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22709d701e7037e64dae2a04855021b62efd64a66c3ceed99dfd684bfef09e38"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c9099bf89078675c372339011ccfc9ec310310bf6c292b413c013eb90ffdcafc"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04611cc0f627fc4a50bc4a9a2e6178a974c6a6a4aa9c1cca921635d2c47b9c87"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:552a39987ac6655dad4bf6f17dd2b55c7b0c6e949d933b8846d2e312ee80005a"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e031899cb2bc92c0cf4d45389eff5b078d1936860a1be3aa8c94fa25fb46ed8"}, + {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2dacb3dae6b8cc579637a7b72f008bff50a94cde5e36e432352f4ca57b9e54c4"}, + {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e5c31d70a478b0ca22a9d2d76d520ae996214019d39ed7dd93af872c7f301e52"}, + {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb804c7d0bfbd7e3f33924ff49757de9106c44e27979e2492819c16972ec0da2"}, + {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:36b2d700a27e168fa96272b42d28c7ac3ff72030c67b32f37c05616ebd22a202"}, + {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:16f81025bb3556eccb0681d7946e2b35ff254f9f888cff7d2120e8826330315c"}, + {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:da80047524eac2acf7c04c18ac7a7da05a9136241f642dd2ed94269ef0d0a45a"}, + {file = "regex-2022.1.18-cp37-cp37m-win32.whl", hash = "sha256:6ca45359d7a21644793de0e29de497ef7f1ae7268e346c4faf87b421fea364e6"}, + {file = "regex-2022.1.18-cp37-cp37m-win_amd64.whl", hash = "sha256:38289f1690a7e27aacd049e420769b996826f3728756859420eeee21cc857118"}, + {file = "regex-2022.1.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6014038f52b4b2ac1fa41a58d439a8a00f015b5c0735a0cd4b09afe344c94899"}, + {file = "regex-2022.1.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b5d6f9aed3153487252d00a18e53f19b7f52a1651bc1d0c4b5844bc286dfa52"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d24b03daf7415f78abc2d25a208f234e2c585e5e6f92f0204d2ab7b9ab48e3"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf594cc7cc9d528338d66674c10a5b25e3cde7dd75c3e96784df8f371d77a298"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd914db437ec25bfa410f8aa0aa2f3ba87cdfc04d9919d608d02330947afaeab"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b6840b6448203228a9d8464a7a0d99aa8fa9f027ef95fe230579abaf8a6ee1"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11772be1eb1748e0e197a40ffb82fb8fd0d6914cd147d841d9703e2bef24d288"}, + {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a602bdc8607c99eb5b391592d58c92618dcd1537fdd87df1813f03fed49957a6"}, + {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7e26eac9e52e8ce86f915fd33380f1b6896a2b51994e40bb094841e5003429b4"}, + {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:519c0b3a6fbb68afaa0febf0d28f6c4b0a1074aefc484802ecb9709faf181607"}, + {file = 
"regex-2022.1.18-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3c7ea86b9ca83e30fa4d4cd0eaf01db3ebcc7b2726a25990966627e39577d729"}, + {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:51f02ca184518702975b56affde6c573ebad4e411599005ce4468b1014b4786c"}, + {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:385ccf6d011b97768a640e9d4de25412204fbe8d6b9ae39ff115d4ff03f6fe5d"}, + {file = "regex-2022.1.18-cp38-cp38-win32.whl", hash = "sha256:1f8c0ae0a0de4e19fddaaff036f508db175f6f03db318c80bbc239a1def62d02"}, + {file = "regex-2022.1.18-cp38-cp38-win_amd64.whl", hash = "sha256:760c54ad1b8a9b81951030a7e8e7c3ec0964c1cb9fee585a03ff53d9e531bb8e"}, + {file = "regex-2022.1.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93c20777a72cae8620203ac11c4010365706062aa13aaedd1a21bb07adbb9d5d"}, + {file = "regex-2022.1.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6aa427c55a0abec450bca10b64446331b5ca8f79b648531138f357569705bc4a"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38baee6bdb7fe1b110b6b3aaa555e6e872d322206b7245aa39572d3fc991ee4"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:752e7ddfb743344d447367baa85bccd3629c2c3940f70506eb5f01abce98ee68"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8acef4d8a4353f6678fd1035422a937c2170de58a2b29f7da045d5249e934101"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c73d2166e4b210b73d1429c4f1ca97cea9cc090e5302df2a7a0a96ce55373f1c"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24c89346734a4e4d60ecf9b27cac4c1fee3431a413f7aa00be7c4d7bbacc2c4d"}, + {file = "regex-2022.1.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:596f5ae2eeddb79b595583c2e0285312b2783b0ec759930c272dbf02f851ff75"}, + {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ecfe51abf7f045e0b9cdde71ca9e153d11238679ef7b5da6c82093874adf3338"}, + {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1d6301f5288e9bdca65fab3de6b7de17362c5016d6bf8ee4ba4cbe833b2eda0f"}, + {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:93cce7d422a0093cfb3606beae38a8e47a25232eea0f292c878af580a9dc7605"}, + {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cf0db26a1f76aa6b3aa314a74b8facd586b7a5457d05b64f8082a62c9c49582a"}, + {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:defa0652696ff0ba48c8aff5a1fac1eef1ca6ac9c660b047fc8e7623c4eb5093"}, + {file = "regex-2022.1.18-cp39-cp39-win32.whl", hash = "sha256:6db1b52c6f2c04fafc8da17ea506608e6be7086715dab498570c3e55e4f8fbd1"}, + {file = "regex-2022.1.18-cp39-cp39-win_amd64.whl", hash = "sha256:ebaeb93f90c0903233b11ce913a7cb8f6ee069158406e056f884854c737d2442"}, + {file = "regex-2022.1.18.tar.gz", hash = "sha256:97f32dc03a8054a4c4a5ab5d761ed4861e828b2c200febd4e46857069a483916"}, ] requests = [ - {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, - {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = 
"sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] requests-mock = [ {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"}, {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"}, ] requests-oauthlib = [ - {file = "requests-oauthlib-1.3.0.tar.gz", hash = "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"}, - {file = "requests_oauthlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d"}, - {file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"}, + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] responses = [ {file = "responses-0.13.4-py2.py3-none-any.whl", hash = "sha256:d8d0f655710c46fd3513b9202a7f0dcedd02ca0f8cf4976f27fa8ab5b81e656d"}, {file = "responses-0.13.4.tar.gz", hash = "sha256:9476775d856d3c24ae660bbebe29fb6d789d4ad16acd723efbfb6ee20990b899"}, ] rjsmin = [ - {file = "rjsmin-1.1.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:799890bd07a048892d8d3deb9042dbc20b7f5d0eb7da91e9483c561033b23ce2"}, - {file = "rjsmin-1.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:211c2fe8298951663bbc02acdffbf714f6793df54bfc50e1c6c9e71b3f2559a3"}, - {file = "rjsmin-1.1.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:585e75a84d9199b68056fd4a083d9a61e2a92dfd10ff6d4ce5bdb04bc3bdbfaf"}, - {file = "rjsmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e487a7783ac4339e79ec610b98228eb9ac72178973e3dee16eba0e3feef25924"}, - {file = "rjsmin-1.1.0-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:0ab825839125eaca57cc59581d72e596e58a7a56fbc0839996b7528f0343a0a8"}, - {file = "rjsmin-1.1.0-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:6044ca86e917cd5bb2f95e6679a4192cef812122f28ee08c677513de019629b3"}, - {file = "rjsmin-1.1.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ecd29f1b3e66a4c0753105baec262b331bcbceefc22fbe6f7e8bcd2067bcb4d7"}, - {file = "rjsmin-1.1.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:975b69754d6a76be47c0bead12367a1ca9220d08e5393f80bab0230d4625d1f4"}, - {file = "rjsmin-1.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:466fe70cc5647c7c51b3260c7e2e323a98b2b173564247f9c89e977720a0645f"}, - {file = "rjsmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e3908b21ebb584ce74a6ac233bdb5f29485752c9d3be5e50c5484ed74169232c"}, - {file = "rjsmin-1.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:714329db774a90947e0e2086cdddb80d5e8c4ac1c70c9f92436378dedb8ae345"}, - {file = "rjsmin-1.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dd0f4819df4243ffe4c964995794c79ca43943b5b756de84be92b445a652fb86"}, - {file = "rjsmin-1.1.0.tar.gz", hash = "sha256:b15dc75c71f65d9493a8c7fa233fdcec823e3f1b88ad84a843ffef49b338ac32"}, + {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"}, + {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"}, + {file = 
"rjsmin-1.2.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3b14f4c2933ec194eb816b71a0854ce461b6419a3d852bf360344731ab28c0a6"}, + {file = "rjsmin-1.2.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:54fc30519365841b27556ccc1cb94c5b4413c384ff6d467442fddba66e2e325a"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:40e7211a25d9a11ac9ff50446e41268c978555676828af86fa1866615823bfff"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:99e5597a812b60058baa1457387dc79cca7d273b2a700dc98bfd20d43d60711d"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:993935654c1311280e69665367d7e6ff694ac9e1609168cf51cae8c0307df0db"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c81229ffe5b0a0d5b3b5d5e6d0431f182572de9e9a077e85dbae5757db0ab75c"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1c93b29fd725e61718299ffe57de93ff32d71b313eaabbfcc7bd32ddb82831d5"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:38a4474ed52e1575fb9da983ec8657faecd8ab3738508d36e04f87769411fd3d"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1622fbb6c6a8daaf77da13cc83356539bfe79c1440f9664b02c7f7b150b9a18e"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4387a00777faddf853eebdece9f2e56ebaf243c3f24676a9de6a20c5d4f3d731"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:86c4da7285ddafe6888cb262da563570f28e4a31146b5164a7a6947b1222196b"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d63e193a2f932a786ae82068aa76d1d126fcdff8582094caff9e5e66c4dcc124"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:05efa485dfddb6418e3b86d8862463aa15641a61f6ae05e7e6de8f116ee77c69"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:b6a7c8c8d19e154334f640954e43e57283e87bb4a2f6e23295db14eea8e9fc1d"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2ed83aca637186bafdc894b4b7fc3657e2d74014ccca7d3d69122c1e82675216"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:41c7c3910f7b8816e37366b293e576ddecf696c5f2197d53cf2c1526ac336646"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8944a8a55ac825b8e5ec29f341ecb7574697691ef416506885898d2f780fb4ca"}, + {file = "rjsmin-1.2.0.tar.gz", hash = "sha256:6c529feb6c400984452494c52dd9fdf59185afeacca2afc5174a28ab37751a1b"}, ] rx = [ {file = "Rx-1.6.1-py2.py3-none-any.whl", hash = "sha256:7357592bc7e881a95e0c2013b73326f704953301ab551fbc8133a6fadab84105"}, @@ -2652,52 +2815,67 @@ sentry-sdk = [ {file = "sentry_sdk-0.19.5-py2.py3-none-any.whl", hash = "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0"}, ] simplejson = [ - {file = "simplejson-3.17.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:376023f51edaf7290332dacfb055bc00ce864cb013c0338d0dea48731f37e42f"}, - {file = "simplejson-3.17.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b2a5688606dffbe95e1347a05b77eb90489fe337edde888e23bbb7fd81b0d93b"}, - {file = "simplejson-3.17.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3ba82f8b421886f4a2311c43fb98faaf36c581976192349fef2a89ed0fcdbdef"}, - {file = "simplejson-3.17.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7332f7b06d42153255f7bfeb10266141c08d48cc1a022a35473c95238ff2aebc"}, - {file = "simplejson-3.17.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:c2d5334d935af711f6d6dfeec2d34e071cdf73ec0df8e8bd35ac435b26d8da97"}, - {file = "simplejson-3.17.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:417b7e119d66085dc45bdd563dcb2c575ee10a3b1c492dd3502a029448d4be1c"}, - {file = "simplejson-3.17.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:42b7c7264229860fe879be961877f7466d9f7173bd6427b3ba98144a031d49fb"}, - {file = "simplejson-3.17.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5fe8c6dcb9e6f7066bdc07d3c410a2fca78c0d0b4e0e72510ffd20a60a20eb8e"}, - {file = "simplejson-3.17.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b92fbc2bc549c5045c8233d954f3260ccf99e0f3ec9edfd2372b74b350917752"}, - {file = "simplejson-3.17.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5f7f53b1edd4b23fb112b89208377480c0bcee45d43a03ffacf30f3290e0ed85"}, - {file = "simplejson-3.17.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:40ece8fa730d1a947bff792bcc7824bd02d3ce6105432798e9a04a360c8c07b0"}, - {file = "simplejson-3.17.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10defa88dd10a0a4763f16c1b5504e96ae6dc68953cfe5fc572b4a8fcaf9409b"}, - {file = "simplejson-3.17.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa86cfdeb118795875855589934013e32895715ec2d9e8eb7a59be3e7e07a7e1"}, - {file = "simplejson-3.17.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ce66f730031b9b3683b2fc6ad4160a18db86557c004c3d490a29bf8d450d7ab9"}, - {file = "simplejson-3.17.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:352c11582aa1e49a2f0f7f7d8fd5ec5311da890d1354287e83c63ab6af857cf5"}, - {file = "simplejson-3.17.5-cp310-cp310-win32.whl", hash = "sha256:8e595de17178dd3bbeb2c5b8ea97536341c63b7278639cb8ee2681a84c0ef037"}, - {file = "simplejson-3.17.5-cp310-cp310-win_amd64.whl", hash = "sha256:cb0afc3bad49eb89a579103616574a54b523856d20fc539a4f7a513a0a8ba4b2"}, - {file = "simplejson-3.17.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ade09aa3c284d11f39640aebdcbb748e1996f0c60504f8c4a0c5a9fec821e67a"}, - {file = "simplejson-3.17.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87572213965fd8a4fb7a97f837221e01d8fddcfb558363c671b8aa93477fb6a2"}, - {file = "simplejson-3.17.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2b59acd09b02da97728d0bae8ff48876d7efcbbb08e569c55e2d0c2e018324f5"}, - {file = "simplejson-3.17.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e29b9cea4216ec130df85d8c36efb9985fda1c9039e4706fb30e0fb6a67602ff"}, - {file = "simplejson-3.17.5-cp36-cp36m-win32.whl", hash = "sha256:f550730d18edec4ff9d4252784b62adfe885d4542946b6d5a54c8a6521b56afd"}, - {file = "simplejson-3.17.5-cp36-cp36m-win_amd64.whl", hash = "sha256:1c2688365743b0f190392e674af5e313ebe9d621813d15f9332e874b7c1f2d04"}, - {file = "simplejson-3.17.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f13c48cc4363829bdfecc0c181b6ddf28008931de54908a492dc8ccd0066cd60"}, - {file = "simplejson-3.17.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a6943816e10028eeed512ea03be52b54ea83108b408d1049b999f58a760089b"}, - {file = "simplejson-3.17.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3d72aa9e73134dacd049a2d6f9bd219f7be9c004d03d52395831611d66cedb71"}, - {file = 
"simplejson-3.17.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b94df70bd34a3b946c0eb272022fb0f8a9eb27cad76e7f313fedbee2ebe4317"}, - {file = "simplejson-3.17.5-cp37-cp37m-win32.whl", hash = "sha256:065230b9659ac38c8021fa512802562d122afb0cf8d4b89e257014dcddb5730a"}, - {file = "simplejson-3.17.5-cp37-cp37m-win_amd64.whl", hash = "sha256:86fcffc06f1125cb443e2bed812805739d64ceb78597ac3c1b2d439471a09717"}, - {file = "simplejson-3.17.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78c6f0ed72b440ebe1892d273c1e5f91e55e6861bea611d3b904e673152a7a4c"}, - {file = "simplejson-3.17.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36b08b886027eac67e7a0e822e3a5bf419429efad7612e69501669d6252a21f2"}, - {file = "simplejson-3.17.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fe1c33f78d2060719d52ea9459d97d7ae3a5b707ec02548575c4fbed1d1d345b"}, - {file = "simplejson-3.17.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:140eb58809f24d843736edb8080b220417e22c82ac07a3dfa473f57e78216b5f"}, - {file = "simplejson-3.17.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c9b30a2524ae6983b708f12741a31fbc2fb8d6fecd0b6c8584a62fd59f59e09"}, - {file = "simplejson-3.17.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:24e413bd845bd17d4d72063d64e053898543fb7abc81afeae13e5c43cef9c171"}, - {file = "simplejson-3.17.5-cp38-cp38-win32.whl", hash = "sha256:5f5051a13e7d53430a990604b532c9124253c5f348857e2d5106d45fc8533860"}, - {file = "simplejson-3.17.5-cp38-cp38-win_amd64.whl", hash = "sha256:188f2c78a8ac1eb7a70a4b2b7b9ad11f52181044957bf981fb3e399c719e30ee"}, - {file = "simplejson-3.17.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:457d9cfe7ece1571770381edccdad7fc255b12cd7b5b813219441146d4f47595"}, - {file = "simplejson-3.17.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa843ee0d34c7193f5a816e79df8142faff851549cab31e84b526f04878ac778"}, - {file = "simplejson-3.17.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2cc4b68e59319e3de778325e34fbff487bfdb2225530e89995402989898d681"}, - {file = "simplejson-3.17.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e90d2e219c3dce1500dda95f5b893c293c4d53c4e330c968afbd4e7a90ff4a5b"}, - {file = "simplejson-3.17.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:917f01db71d5e720b731effa3ff4a2c702a1b6dacad9bcdc580d86a018dfc3ca"}, - {file = "simplejson-3.17.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:07707ba69324eaf58f0c6f59d289acc3e0ed9ec528dae5b0d4219c0d6da27dc5"}, - {file = "simplejson-3.17.5-cp39-cp39-win32.whl", hash = "sha256:2df15814529a4625ea6f7b354a083609b3944c269b954ece0d0e7455872e1b2a"}, - {file = "simplejson-3.17.5-cp39-cp39-win_amd64.whl", hash = "sha256:71a54815ec0212b0cba23adc1b2a731bdd2df7b9e4432718b2ed20e8aaf7f01a"}, - {file = "simplejson-3.17.5.tar.gz", hash = "sha256:91cfb43fb91ff6d1e4258be04eee84b51a4ef40a28d899679b9ea2556322fb50"}, + {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", 
hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, + {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, + {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, + {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, + {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, + {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, + {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, + {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, + {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, + {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, + {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, + {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, + {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, + {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, + {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, + {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, + {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, ] single-source = [ {file = "single-source-0.1.5.tar.gz", hash = "sha256:61b5c04e558fa73ac725b9fbdfb24027845533d99ca72e225efd5810dc261483"}, @@ -2715,30 +2893,34 @@ snapshottest = [ {file = "snapshottest-0.5.1.tar.gz", hash = "sha256:2cc7157e77674ea8ebeb2351466ff50cd4b5ad8e213adc06df9c16a75ab5bafc"}, ] social-auth-app-django = [ - {file = "social-auth-app-django-3.1.0.tar.gz", hash = "sha256:6d0dd18c2d9e71ca545097d57b44d26f59e624a12833078e8e52f91baf849778"}, - {file = "social_auth_app_django-3.1.0-py2-none-any.whl", hash = "sha256:f151396e5b16e2eee12cd2e211004257826ece24fc4ae97a147df386c1cd7082"}, - {file = "social_auth_app_django-3.1.0-py3-none-any.whl", hash = "sha256:9237e3d7b6f6f59494c3b02e0cce6efc69c9d33ad9d1a064e3b2318bcbe89ae3"}, + {file = "social-auth-app-django-3.4.0.tar.gz", hash = "sha256:09575f5c7dd91465df3a898c58e7c4ae1e78f31edba36b8b7be47ab0aeef2789"}, + {file = "social_auth_app_django-3.4.0-py2-none-any.whl", hash = "sha256:02b561e175d4a93896e4436b591586b61e647bd8eeef14c99a26344eb3b48d0e"}, + {file = "social_auth_app_django-3.4.0-py3-none-any.whl", hash = "sha256:47d1720115a9eaad78a67e99987d556abaa01222b9c2b9538182bbdbb10304ba"}, ] social-auth-core = [ - {file = "social-auth-core-3.3.3.tar.gz", hash = "sha256:2f6ce1af8ec2b2cc37b86d647f7d4e4292f091ee556941db34b1e0e2dee77fc0"}, - {file = 
"social_auth_core-3.3.3-py2-none-any.whl", hash = "sha256:4a3cdf69c449b235cdabd54a1be7ba3722611297e69fded52e3584b1a990af25"}, - {file = "social_auth_core-3.3.3-py3-none-any.whl", hash = "sha256:21c0639c56befd33ec162c2210d583bb1de8e1136d53b21bafb96afaf2f86c91"}, + {file = "social-auth-core-3.4.0.tar.gz", hash = "sha256:aaec7f1e1a9bb61d0467d05c8cfe8dd55402f39229716b933e3dc29eb5f1e61a"}, + {file = "social_auth_core-3.4.0-py2-none-any.whl", hash = "sha256:a4b972b6250d7a32940aec2972e33ebc645de91b2153d18dcd3e38fb74271042"}, + {file = "social_auth_core-3.4.0-py3-none-any.whl", hash = "sha256:b3aa96be236e59842ae45a5a51fe75c97814087ab5ba3092e80b41cb3dcdd8af"}, ] sorl-thumbnail = [ {file = "sorl-thumbnail-12.5.0.tar.gz", hash = "sha256:8dfe5fda91a5047d1d35a0b9effe7b000764a01d648e15ca076f44e9c34b6dbd"}, {file = "sorl_thumbnail-12.5.0-py2.py3-none-any.whl", hash = "sha256:d9e3f018d19293824803e4ffead96b19dfcd44fa7987cea392f50436817bef34"}, ] soupsieve = [ - {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, - {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"}, + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, ] sqlparse = [ {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, ] +stack-data = [ + {file = "stack_data-0.1.4-py3-none-any.whl", hash = "sha256:02cc0683cbc445ae4ca8c4e3a0e58cb1df59f252efb0aa016b34804a707cf9bc"}, + {file = "stack_data-0.1.4.tar.gz", hash = "sha256:7769ed2482ce0030e00175dd1bf4ef1e873603b6ab61cd3da443b410e64e9477"}, +] tablib = [ - {file = "tablib-3.1.0-py3-none-any.whl", hash = "sha256:26141c9cf2d5904a2228d3f5d45f8a46a3f3f2f0fbb4c33b4a1c1ddca9f31348"}, - {file = "tablib-3.1.0.tar.gz", hash = "sha256:d64c9f6712918a3d90ec5d71b44b8bab1083e3609e4844ad2be80eb633e097ed"}, + {file = "tablib-3.2.0-py3-none-any.whl", hash = "sha256:1ba12da7b0b17e33f5997fb1bc8ed560aae3535f5892ae5e5c01a056c47d4d78"}, + {file = "tablib-3.2.0.tar.gz", hash = "sha256:12d8686454c721de88d8ca5adf07e1f419ef6dbcecedf65e8950d4a329daf3a0"}, ] termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, @@ -2752,45 +2934,38 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] traitlets = [ - {file = "traitlets-5.1.0-py3-none-any.whl", hash = "sha256:03f172516916220b58c9f19d7f854734136dd9528103d04e9bf139a92c9f54c4"}, - {file = "traitlets-5.1.0.tar.gz", hash = "sha256:bd382d7ea181fbbcce157c133db9a829ce06edffe097bcf3ab945b435452b46d"}, + {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, + {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, ] typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, + {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, + {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, + {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, + {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, + {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, + {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, + {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, + {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, + {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, + {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, + {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, 
+ {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, + {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, + {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] unittest-xml-reporting = [ {file = "unittest-xml-reporting-3.0.4.tar.gz", hash = "sha256:984cebba69e889401bfe3adb9088ca376b3a1f923f0590d005126c1bffd1a695"}, @@ -2859,8 +3034,8 @@ xlrd = [ {file = "xlrd-1.2.0.tar.gz", hash = "sha256:546eb36cee8db40c3eaa46c351e67ffee6eeb5fa2650b71bc4c758a29a1b29b2"}, ] xlsxwriter = [ - {file = "XlsxWriter-3.0.1-py3-none-any.whl", hash = "sha256:2f2af944d2b4b5f21cd3ac8e01b2417ec74c60e2ca11cae90b4f32ee172c99d6"}, - {file = "XlsxWriter-3.0.1.tar.gz", hash = "sha256:3f39bf581c55f3ad1438bc170d7f4c4649cee8b6b7a80d21f79508118eeea52a"}, + {file = "XlsxWriter-3.0.2-py3-none-any.whl", hash = "sha256:1aa65166697c42284e82f5bf9a33c2e913341eeef2b262019c3f5b5334768765"}, + {file = "XlsxWriter-3.0.2.tar.gz", hash = "sha256:53005f03e8eb58f061ebf41d5767c7495ee0772c2396fe26b7e0ca22fa9c2570"}, ] xlwt = [ {file = "xlwt-1.3.0-py2.py3-none-any.whl", hash = "sha256:a082260524678ba48a297d922cc385f58278b8aa68741596a87de01a9c628b2e"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 2e41446546..c77d1ed190 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -51,9 +51,11 @@ description = "HCT MIS is UNICEF's humanitarian cash transfer platform." 
authors = ["Tivix"] [tool.poetry.dependencies] -python = "3.9.1" -Django = "2.2.26" +python = "3.9.10" +Django = "3.2" Jinja2 = "2.11.2" +#name = "HCT MIS Backend" +#version = "2022.1.1" PyJWT = "1.7.1" PyYAML = "^5.4.1" black = "^20.8b1" @@ -73,7 +75,7 @@ django-countries = "7.0" django-elasticsearch-dsl = "7.1.4" django-environ = "*" django-extensions = "2.2.6" -django-filter = "2.2.0" +django-filter = "^2.2.0" django-hijack = "*" django-jsoneditor = "*" django-model-utils = "4.0.0" @@ -85,7 +87,7 @@ django-smart-admin = "1.6" django-sql-explorer = {extras = ["xls"], version = "^2.2"} django-storages = { version = "1.8", extras = ["azure"] } django-sysinfo = ">=2.6.0" -django_compressor = "2.3" +django_compressor = ">=2.4" elasticsearch ="<7.14" elasticsearch-dsl ="^7.0.0" gevent = "20.9.0" @@ -106,8 +108,8 @@ pygments = "2.7.3" pytz = "2020.4" sentry-sdk = "0.19.5" single-source = "^0.1.5" -social-auth-app-django = "3.1.0" -social-auth-core = "3.3.3" +social-auth-app-django = "^3.1.0" +social-auth-core = "^3.3.3" sorl-thumbnail = "12.5.0" urllib3 = "1.26.2" xlrd = "1.2.0" From cd2aae00d72acfeca1d340bdc025afe11ec81175 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Wed, 2 Feb 2022 14:02:39 +0000 Subject: [PATCH 03/24] Bumped version of django-countries --- backend/poetry.lock | 9 +++++---- backend/pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index c9ba5ec5c4..29bbf04188 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -518,7 +518,7 @@ Django = ">=1.11" [[package]] name = "django-countries" -version = "7.0" +version = "7.2.1" description = "Provides a country field for Django models." category = "main" optional = false @@ -527,6 +527,7 @@ python-versions = "*" [package.extras] dev = ["tox", "black", "django", "pytest", "pytest-django", "djangorestframework", "graphene-django"] maintainer = ["transifex-client", "zest.releaser", "django"] +pyuca = ["pyuca"] test = ["pytest", "pytest-django", "pytest-cov", "graphene-django"] [[package]] @@ -1951,7 +1952,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.10" -content-hash = "fc1a9a2077f38b07f9ed3f79712f23b2e13c462fa94e1a65be642286c7e7f421" +content-hash = "129e3199b0bf4f85f4dc7dac35711990e3b8bfb80f3702815f9d5d375dd55b02" [metadata.files] amqp = [ @@ -2178,8 +2179,8 @@ django-cors-headers = [ {file = "django_cors_headers-3.2.0-py3-none-any.whl", hash = "sha256:a8b2772582e8025412f4d4b54b617d8b707076ffd53a2b961bd24f10ec207a7c"}, ] django-countries = [ - {file = "django-countries-7.0.tar.gz", hash = "sha256:11191d245256b4e46a4715e391ce0b2fa982e968a6a2b4d263cace95ebe448c1"}, - {file = "django_countries-7.0-py3-none-any.whl", hash = "sha256:936fd0716cec8388f7c50a8e9e11ba1049d926276fb4e7c8cca61716b78edc91"}, + {file = "django-countries-7.2.1.tar.gz", hash = "sha256:26878b54d36bedff30b4535ceefcb8af6784741a8b30b1b8a662fb14a936a4ab"}, + {file = "django_countries-7.2.1-py3-none-any.whl", hash = "sha256:adc965f1d348124274b7d918fc1aad5e29609758af999e1822baa9f2cc06d1b8"}, ] django-elasticsearch-dsl = [ {file = "django-elasticsearch-dsl-7.1.4.tar.gz", hash = "sha256:5bbd49a9acb51c08fbbb7fba9beee7c9ce73b481af718cc57e4c8ac3d561888f"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index c77d1ed190..4068edb69b 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -71,7 +71,7 @@ django-celery-results = "^2.0.1" django-concurrency = 
"^2.2" django-constance = { version = "2.8.0", extras = ["redis"] } django-cors-headers = "3.2.0" -django-countries = "7.0" +django-countries = "^7.0" django-elasticsearch-dsl = "7.1.4" django-environ = "*" django-extensions = "2.2.6" From 5ac1d9bdb36755bdddea2cacbcb702dae3f6b6af Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Thu, 3 Feb 2022 11:46:34 +0000 Subject: [PATCH 04/24] Upgrade of graphene --- backend/hct_mis_api/apps/core/schema.py | 2 +- backend/poetry.lock | 27 ++++++++++++++----------- backend/pyproject.toml | 4 ++-- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 0f9c73083a..3e3fa46d30 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -46,7 +46,7 @@ class Meta: model = AdminArea fields = { "title": ["exact", "istartswith"], - "business_area": ["exact"], + # "business_area": ["exact"], } diff --git a/backend/poetry.lock b/backend/poetry.lock index 29bbf04188..5b958e35ee 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -748,11 +748,11 @@ sftp = ["paramiko"] [[package]] name = "django-sysinfo" -version = "2.6.0" +version = "2.6.2" description = "Simple django app to expose system infos: libraries version, databae server infos..." category = "main" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.9,<4.0" [package.dependencies] psutil = "*" @@ -923,7 +923,7 @@ test = ["pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "fastdiff (== [[package]] name = "graphene-django" -version = "2.7.1" +version = "2.15.0" description = "Graphene Django integration" category = "main" optional = false @@ -936,15 +936,16 @@ graphql-core = ">=2.1.0,<3" promise = ">=2.1" singledispatch = ">=3.4.0.3" six = ">=1.10.0" +text-unidecode = "*" [package.extras] -dev = ["black (==19.3b0)", "flake8 (==3.7.7)", "flake8-black (==0.1.0)", "flake8-bugbear (==19.3.0)", "pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] +dev = ["black (==19.10b0)", "flake8 (==3.7.9)", "flake8-black (==0.1.1)", "flake8-bugbear (==20.1.4)", "pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] rest_framework = ["djangorestframework (>=3.6.3)"] test = ["pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] [[package]] name = "graphene-file-upload" -version = "1.2.2" +version = "1.3.0" description = "Lib for adding file upload functionality to GraphQL mutations in Graphene Django and Flask-Graphql" category = "main" optional = false @@ -957,6 +958,7 @@ six = ">=1.11.0" all = ["Flask (>=1.0.2)", "graphene (>=2.1.2)", "Flask-Graphql (>=2.0.0)", "graphene-django (>=2.0.0)"] django = ["graphene-django (>=2.0.0)"] flask = ["Flask (>=1.0.2)", "graphene (>=2.1.2)", "Flask-Graphql (>=2.0.0)"] +tests = ["coverage", "pytest", "pytest-cov", "pytest-django"] [[package]] name = "graphql-core" @@ -1764,7 +1766,7 @@ python-versions = "*" name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -1952,7 +1954,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", 
"zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.10" -content-hash = "129e3199b0bf4f85f4dc7dac35711990e3b8bfb80f3702815f9d5d375dd55b02" +content-hash = "2d27a1683a39d74f9d3ea7a654699b5468c12ac6c445577369b5c5e9a878ffd5" [metadata.files] amqp = [ @@ -2250,7 +2252,8 @@ django-storages = [ {file = "django_storages-1.8-py2.py3-none-any.whl", hash = "sha256:0a9b7e620e969fb0797523695329ed223bf540bbfdf6cd163b061fc11dab2d1c"}, ] django-sysinfo = [ - {file = "django-sysinfo-2.6.0.tar.gz", hash = "sha256:396f0c69f3b40daaac1f8d3dd374714f985576e907e828c4f1087ad255980433"}, + {file = "django-sysinfo-2.6.2.tar.gz", hash = "sha256:2efe6c1a3bedc5766549b8872c7d8b3266ecb45db9a6423b94365d7d089c86dd"}, + {file = "django_sysinfo-2.6.2-py3-none-any.whl", hash = "sha256:9589d4bff0605ed7681a2ca22ce2326a4b64db768038da39180a2ba31ab3ffea"}, ] django-timezone-field = [ {file = "django-timezone-field-4.2.3.tar.gz", hash = "sha256:5dd5bd9249382bef8847d3e7e4c32b7be182a4b538f354130d1252ed228892f8"}, @@ -2322,12 +2325,12 @@ graphene = [ {file = "graphene-2.1.9.tar.gz", hash = "sha256:b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93"}, ] graphene-django = [ - {file = "graphene-django-2.7.1.tar.gz", hash = "sha256:233037a82a56d9d4a1357f51743a2948016a358a0b58d071faa2ae56bf47e65d"}, - {file = "graphene_django-2.7.1-py2.py3-none-any.whl", hash = "sha256:eab206e013eca3366b1a8f78ac9aa1024c7fbabed5cbd0c09fda6d325b37aa32"}, + {file = "graphene-django-2.15.0.tar.gz", hash = "sha256:b78c9b05bc899016b9cc5bf13faa1f37fe1faa8c5407552c6ddd1a28f46fc31a"}, + {file = "graphene_django-2.15.0-py2.py3-none-any.whl", hash = "sha256:02671d195f0c09c8649acff2a8f4ad4f297d0f7d98ea6e6cdf034b81bab92880"}, ] graphene-file-upload = [ - {file = "graphene_file_upload-1.2.2-py3-none-any.whl", hash = "sha256:034ff72d2834b7aebd06fda412fa88a10aba34bac604317f7552457e47040654"}, - {file = "graphene_file_upload-1.2.2.tar.gz", hash = "sha256:b5f47a81f5614d1184ba9480a63835769b26303cb7759316122401784858e9c7"}, + {file = "graphene_file_upload-1.3.0-py3-none-any.whl", hash = "sha256:5afe50f409f50e3d198fd92c883d98d868e6c6aaadf5df3a3f4d88ecad90ed97"}, + {file = "graphene_file_upload-1.3.0.tar.gz", hash = "sha256:6898480b0556826472c80971032917c01968ade5800d84054008fe598795b063"}, ] graphql-core = [ {file = "graphql-core-2.3.2.tar.gz", hash = "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 4068edb69b..5502ab29bb 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -91,8 +91,8 @@ django_compressor = ">=2.4" elasticsearch ="<7.14" elasticsearch-dsl ="^7.0.0" gevent = "20.9.0" -graphene-django = "2.7.1" -graphene-file-upload = "1.2.2" +graphene-django = "^2.7.1" +graphene-file-upload = "^1.2.2" greenlet = "0.4.17" gunicorn = "20.0.4" jedi = "0.17.2" From c17fd4a36ad40d8f1cccdf48a3912c9e56673c70 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 4 Feb 2022 12:46:37 +0000 Subject: [PATCH 05/24] Updated tests and migrations folders for Django 3.2 --- .../apps/account/authentication.py | 3 ++- .../apps/account/tests/test_user_roles.py | 2 +- backend/hct_mis_api/apps/account/views.py | 1 + .../hct_mis_api/apps/activity_log/schema.py | 11 +++++++--- .../apps/cash_assist_datahub/celery_tasks.py | 7 +++--- .../apps/cash_assist_datahub/fixtures.py | 4 ++-- .../tests/test_pull_from_datahub.py | 4 ++-- backend/hct_mis_api/apps/core/celery_tasks.py | 4 +++- backend/hct_mis_api/apps/core/converters.py 
| 5 +++-- backend/hct_mis_api/apps/core/filters.py | 5 +++-- .../commands/generatedocumenttypes.py | 11 ++++++++-- .../management/commands/generatefakefile.py | 7 ++++-- .../apps/core/management/commands/init.py | 2 +- .../management/commands/pullfromcadatahub.py | 4 +++- .../management/commands/pullfromerpdatahub.py | 4 +++- backend/hct_mis_api/apps/core/storage.py | 1 + .../apps/core/tests/test_exchange_rates.py | 17 +++++++------- .../tests/test_flexible_helper_methods.py | 3 ++- .../apps/core/tests/test_flexibles.py | 7 +++++- .../core/tests/test_kobo_template_upload.py | 2 +- ...test_setting_types_for_calculate_fields.py | 8 ++++++- backend/hct_mis_api/apps/core/views.py | 5 +++-- .../apps/erp_datahub/celery_tasks.py | 4 +++- .../hct_mis_api/apps/erp_datahub/fixtures.py | 4 ++-- .../tests/test_pull_from_erp_datahub.py | 10 +++++---- .../tests/test_sync_to_mis_datahub.py | 2 +- backend/hct_mis_api/apps/geo/utils.py | 19 ++++++++-------- .../apps/grievance/notifications.py | 1 + .../tasks/deduplicate_and_check_sanctions.py | 7 +++++- ...st_reassign_roles_on_disable_individual.py | 1 + ...ap_test_filter_already_existing_tickets.py | 1 - ..._test_grievance_create_complaint_ticket.py | 1 - ..._test_grievance_create_sensitive_ticket.py | 1 - .../tests/snapshots/snap_test_ticket_notes.py | 1 - .../test_filter_already_existing_tickets.py | 11 +++++----- .../test_grievance_data_change_mutations.py | 6 ++--- .../apps/grievance/tests/test_ticket_notes.py | 8 +++++-- .../tests/test_update_grievance_tickets.py | 4 ++-- .../hct_mis_api/apps/grievance/validators.py | 1 + .../apps/household/elasticsearch_utils.py | 1 + .../household/migrations/0044_migration.py | 14 ++++++------ .../snapshots/snap_test_household_query.py | 1 - .../snapshots/snap_test_individual_query.py | 1 - .../tests/test_document_type_model.py | 2 +- .../household/tests/test_household_query.py | 1 + .../household/tests/test_individual_query.py | 6 ++--- .../tests/test_individual_xlsx_update.py | 2 +- .../mis_datahub/migrations/0028_migration.py | 2 +- .../tests/test_send_tp_to_datahub.py | 2 +- .../hct_mis_api/apps/payment/celery_tasks.py | 4 +++- backend/hct_mis_api/apps/payment/fixtures.py | 4 ++-- .../hct_mis_api/apps/payment/rapid_pro/api.py | 5 +++-- ...chart_total_transferred_cash_by_country.py | 1 - ...snap_test_discard_verification_mutation.py | 1 - ...chart_total_transferred_cash_by_country.py | 1 + .../test_discard_verification_mutation.py | 22 +++++++++++-------- backend/hct_mis_api/apps/payment/views.py | 5 ++++- .../xlsx/XlsxVerificationImportService.py | 9 ++++++-- backend/hct_mis_api/apps/power_query/mixin.py | 13 +++++------ backend/hct_mis_api/apps/power_query/utils.py | 2 +- backend/hct_mis_api/apps/power_query/views.py | 2 +- backend/hct_mis_api/apps/program/fixtures.py | 2 +- backend/hct_mis_api/apps/program/mutations.py | 10 ++++++--- backend/hct_mis_api/apps/program/schema.py | 13 ++++++----- .../snapshots/snap_test_cash_plan_choices.py | 1 - .../snapshots/snap_test_cash_plan_queries.py | 1 - .../snap_test_change_program_status.py | 1 - .../snapshots/snap_test_create_program.py | 1 - .../snapshots/snap_test_delete_program.py | 1 - .../snapshots/snap_test_program_choices.py | 1 - .../snapshots/snap_test_update_program.py | 1 - .../program/tests/test_cash_plan_queries.py | 5 +++-- .../tests/test_change_program_status.py | 1 + .../apps/program/tests/test_create_program.py | 1 + .../apps/program/tests/test_delete_program.py | 1 + .../apps/program/tests/test_update_program.py | 1 + 
.../hct_mis_api/apps/program/validators.py | 1 + .../apps/registration_data/schema.py | 18 ++++++++++----- ...nap_test_registration_data_import_query.py | 1 - .../test_registration_data_import_query.py | 7 +++--- .../migrations/0033_migration.py | 10 ++++----- .../snap_test_imported_household_query.py | 1 - .../snap_test_imported_individuals_query.py | 1 - ...istration_data_import_datahub_mutations.py | 1 - ..._registration_data_import_datahub_query.py | 1 - .../tests/tasks/test_mark_submissions.py | 2 +- .../tests/test_deduplication.py | 4 ++-- .../test_handling_documents_duplicates.py | 8 +++---- .../test_imported_document_type_models.py | 2 +- .../tests/test_imported_household_query.py | 7 +++--- .../tests/test_imported_individuals_query.py | 7 +++--- .../tests/test_rdi_create.py | 4 ++-- .../tests/test_refuse_rdi_mutation.py | 2 +- ...istration_data_import_datahub_mutations.py | 12 +++++----- ..._registration_data_import_datahub_query.py | 6 +++-- .../tests/test_template_file_generator.py | 4 +++- .../apps/registration_datahub/views.py | 5 ++++- backend/hct_mis_api/apps/reporting/admin.py | 2 +- .../hct_mis_api/apps/reporting/fixtures.py | 2 +- backend/hct_mis_api/apps/reporting/schema.py | 22 +++++++++++++------ .../snapshots/snap_test_reporting_choices.py | 1 - .../snap_test_reporting_mutations.py | 1 - .../snapshots/snap_test_reports_query.py | 1 - .../reporting/tests/test_report_service.py | 17 ++++++++------ .../tests/test_reporting_mutations.py | 11 +++++----- .../reporting/tests/test_reports_query.py | 1 + .../apps/sanction_list/celery_tasks.py | 4 +++- .../hct_mis_api/apps/sanction_list/schema.py | 9 ++++---- .../tasks/check_against_sanction_list.py | 12 ++++++---- ...t_check_against_sanction_list_pre_merge.py | 2 +- .../hct_mis_api/apps/sanction_list/views.py | 1 + .../apps/steficon/tests/test_rules.py | 4 ++-- .../hct_mis_api/apps/targeting/fixtures.py | 6 ++--- .../services/xlsx_export_targeting_service.py | 5 +++-- ..._households_by_targeting_criteria_query.py | 1 - ...ap_test_copy_target_population_mutation.py | 1 - ..._test_create_target_population_mutation.py | 1 - ...lden_record_by_targeting_criteria_query.py | 1 - ...tatus_change_target_population_mutation.py | 1 - ..._test_update_target_population_mutation.py | 1 - .../test_create_target_population_mutation.py | 1 + ...lden_record_by_targeting_criteria_query.py | 3 ++- .../tests/test_individual_block_filters.py | 6 ++--- backend/hct_mis_api/apps/targeting/views.py | 5 ++++- backend/hct_mis_api/apps/utils/mutations.py | 3 ++- backend/hct_mis_api/apps/utils/schema.py | 4 +++- backend/hct_mis_api/apps/utils/validators.py | 1 - backend/hct_mis_api/middlewares/version.py | 1 + backend/hct_mis_api/schema.py | 20 ++++++++--------- backend/hct_mis_api/settings/base.py | 2 +- 130 files changed, 348 insertions(+), 250 deletions(-) diff --git a/backend/hct_mis_api/apps/account/authentication.py b/backend/hct_mis_api/apps/account/authentication.py index eb0ab71d1d..7e4210dacf 100644 --- a/backend/hct_mis_api/apps/account/authentication.py +++ b/backend/hct_mis_api/apps/account/authentication.py @@ -1,12 +1,13 @@ import logging from django.contrib.auth import get_user_model + from social_core.exceptions import InvalidEmail from social_core.pipeline import social_auth from social_core.pipeline import user as social_core_user from hct_mis_api.apps.account.microsoft_graph import MicrosoftGraphAPI -from hct_mis_api.apps.account.models import UserRole, Role, ACTIVE +from hct_mis_api.apps.account.models import ACTIVE, Role, UserRole 
from hct_mis_api.apps.core.models import BusinessArea logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/account/tests/test_user_roles.py b/backend/hct_mis_api/apps/account/tests/test_user_roles.py index 0fb1e75dc2..9197e4e6c9 100644 --- a/backend/hct_mis_api/apps/account/tests/test_user_roles.py +++ b/backend/hct_mis_api/apps/account/tests/test_user_roles.py @@ -4,7 +4,7 @@ from hct_mis_api.apps.account.admin import UserRoleAdminForm, UserRoleInlineFormSet from hct_mis_api.apps.account.fixtures import UserFactory -from hct_mis_api.apps.account.models import IncompatibleRoles, Role, UserRole, User +from hct_mis_api.apps.account.models import IncompatibleRoles, Role, User, UserRole from hct_mis_api.apps.core.models import BusinessArea diff --git a/backend/hct_mis_api/apps/account/views.py b/backend/hct_mis_api/apps/account/views.py index 119880f044..f9155bf404 100644 --- a/backend/hct_mis_api/apps/account/views.py +++ b/backend/hct_mis_api/apps/account/views.py @@ -1,6 +1,7 @@ from datetime import datetime from django.http import HttpResponse + from openpyxl.writer.excel import save_virtual_workbook from hct_mis_api.apps.account.export_users_xlsx import ExportUsersXlsx diff --git a/backend/hct_mis_api/apps/activity_log/schema.py b/backend/hct_mis_api/apps/activity_log/schema.py index a8a8ec7692..a4452afa0a 100644 --- a/backend/hct_mis_api/apps/activity_log/schema.py +++ b/backend/hct_mis_api/apps/activity_log/schema.py @@ -1,11 +1,16 @@ -import graphene from django.contrib.contenttypes.models import ContentType from django.db.models import Q -from django_filters import FilterSet, CharFilter + +import graphene +from django_filters import CharFilter, FilterSet from graphene import relay from graphene_django import DjangoObjectType -from hct_mis_api.apps.account.permissions import DjangoPermissionFilterConnectionField, hopePermissionClass, Permissions +from hct_mis_api.apps.account.permissions import ( + DjangoPermissionFilterConnectionField, + Permissions, + hopePermissionClass, +) from hct_mis_api.apps.activity_log.models import LogEntry from hct_mis_api.apps.core.extended_connection import ExtendedConnection from hct_mis_api.apps.core.schema import ChoiceObject diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/celery_tasks.py b/backend/hct_mis_api/apps/cash_assist_datahub/celery_tasks.py index 5ee6723385..46dca95085 100644 --- a/backend/hct_mis_api/apps/cash_assist_datahub/celery_tasks.py +++ b/backend/hct_mis_api/apps/cash_assist_datahub/celery_tasks.py @@ -1,8 +1,7 @@ import logging -from hct_mis_api.apps.core.exchange_rates.utils import fix_exchange_rates - from hct_mis_api.apps.core.celery import app +from hct_mis_api.apps.core.exchange_rates.utils import fix_exchange_rates logger = logging.getLogger(__name__) @@ -12,7 +11,9 @@ def pull_from_cashassist_datahub_task(): logger.info("pull_from_cashassist_datahub_task start") try: - from hct_mis_api.apps.cash_assist_datahub.tasks.pull_from_datahub import PullFromDatahubTask + from hct_mis_api.apps.cash_assist_datahub.tasks.pull_from_datahub import ( + PullFromDatahubTask, + ) PullFromDatahubTask().execute() except Exception as e: diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/fixtures.py b/backend/hct_mis_api/apps/cash_assist_datahub/fixtures.py index 6ffe96bd0e..e9792c160a 100644 --- a/backend/hct_mis_api/apps/cash_assist_datahub/fixtures.py +++ b/backend/hct_mis_api/apps/cash_assist_datahub/fixtures.py @@ -6,10 +6,10 @@ from pytz import utc from hct_mis_api.apps.cash_assist_datahub.models import 
( - PaymentRecord, - ServiceProvider, CashPlan, + PaymentRecord, Programme, + ServiceProvider, ) from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.household.models import Household diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/tests/test_pull_from_datahub.py b/backend/hct_mis_api/apps/cash_assist_datahub/tests/test_pull_from_datahub.py index ad1dde7ce2..08802ff72d 100644 --- a/backend/hct_mis_api/apps/cash_assist_datahub/tests/test_pull_from_datahub.py +++ b/backend/hct_mis_api/apps/cash_assist_datahub/tests/test_pull_from_datahub.py @@ -36,7 +36,7 @@ @mock.patch.dict(os.environ, {"EXCHANGE_RATES_API_KEY": "TEST_API_KEY"}) class TestPullDataFromDatahub(TestCase): - multi_db = True + databases = "__all__" program = None target_population = None dh_cash_plan1 = None @@ -276,7 +276,7 @@ def test_pull_data(self, mocker): class TestSessionsPullDataFromDatahub(TestCase): - multi_db = True + databases = "__all__" @classmethod def setUpTestData(cls): diff --git a/backend/hct_mis_api/apps/core/celery_tasks.py b/backend/hct_mis_api/apps/core/celery_tasks.py index 0fc96cc246..e669ee7df5 100644 --- a/backend/hct_mis_api/apps/core/celery_tasks.py +++ b/backend/hct_mis_api/apps/core/celery_tasks.py @@ -2,7 +2,9 @@ from hct_mis_api.apps.core.celery import app from hct_mis_api.apps.core.models import XLSXKoboTemplate -from hct_mis_api.apps.core.tasks.upload_new_template_and_update_flex_fields import KoboRetriableError +from hct_mis_api.apps.core.tasks.upload_new_template_and_update_flex_fields import ( + KoboRetriableError, +) logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/core/converters.py b/backend/hct_mis_api/apps/core/converters.py index cc0d02f819..67c55a7a39 100644 --- a/backend/hct_mis_api/apps/core/converters.py +++ b/backend/hct_mis_api/apps/core/converters.py @@ -1,9 +1,10 @@ import json -import graphene -from concurrency.fields import IntegerVersionField from django.contrib.gis.db.models import GeometryField from django.forms import MultipleChoiceField + +import graphene +from concurrency.fields import IntegerVersionField from graphene_django.converter import convert_django_field from graphene_django.forms.converter import convert_form_field diff --git a/backend/hct_mis_api/apps/core/filters.py b/backend/hct_mis_api/apps/core/filters.py index e26729e7ed..f0c65c20c3 100644 --- a/backend/hct_mis_api/apps/core/filters.py +++ b/backend/hct_mis_api/apps/core/filters.py @@ -1,8 +1,9 @@ import json -from datetime import datetime, date, timedelta +from datetime import date, datetime, timedelta + +from django.forms import DateField, DateTimeField, DecimalField, Field, IntegerField from dateutil.parser import parse -from django.forms import IntegerField, DecimalField, Field, DateTimeField, DateField from django_filters import Filter diff --git a/backend/hct_mis_api/apps/core/management/commands/generatedocumenttypes.py b/backend/hct_mis_api/apps/core/management/commands/generatedocumenttypes.py index ce827b5797..fd96a669ff 100644 --- a/backend/hct_mis_api/apps/core/management/commands/generatedocumenttypes.py +++ b/backend/hct_mis_api/apps/core/management/commands/generatedocumenttypes.py @@ -1,10 +1,17 @@ from django.core.management import BaseCommand from django.db import transaction + from django_countries import countries -from hct_mis_api.apps.household.models import IDENTIFICATION_TYPE_CHOICE, Agency, DocumentType +from hct_mis_api.apps.household.models import ( + IDENTIFICATION_TYPE_CHOICE, + Agency, + DocumentType, +) from 
hct_mis_api.apps.registration_datahub.models import ImportedAgency -from hct_mis_api.apps.registration_datahub.models import ImportedDocumentType as RDHDocumentType +from hct_mis_api.apps.registration_datahub.models import ( + ImportedDocumentType as RDHDocumentType, +) class Command(BaseCommand): diff --git a/backend/hct_mis_api/apps/core/management/commands/generatefakefile.py b/backend/hct_mis_api/apps/core/management/commands/generatefakefile.py index 31ff50b298..6121ae5574 100644 --- a/backend/hct_mis_api/apps/core/management/commands/generatefakefile.py +++ b/backend/hct_mis_api/apps/core/management/commands/generatefakefile.py @@ -1,9 +1,10 @@ import datetime import random -from dateutil.relativedelta import relativedelta from django.conf import settings from django.core.management import BaseCommand + +from dateutil.relativedelta import relativedelta from faker import Faker from openpyxl.drawing.image import Image @@ -18,7 +19,9 @@ ) from hct_mis_api.apps.core.utils import get_combined_attributes from hct_mis_api.apps.household.models import HEAD -from hct_mis_api.apps.registration_datahub.template_generator import TemplateFileGenerator +from hct_mis_api.apps.registration_datahub.template_generator import ( + TemplateFileGenerator, +) class Command(BaseCommand): diff --git a/backend/hct_mis_api/apps/core/management/commands/init.py b/backend/hct_mis_api/apps/core/management/commands/init.py index ea26accc61..7bce5a0eeb 100644 --- a/backend/hct_mis_api/apps/core/management/commands/init.py +++ b/backend/hct_mis_api/apps/core/management/commands/init.py @@ -1,4 +1,4 @@ -from django.core.management import call_command, BaseCommand +from django.core.management import BaseCommand, call_command class Command(BaseCommand): diff --git a/backend/hct_mis_api/apps/core/management/commands/pullfromcadatahub.py b/backend/hct_mis_api/apps/core/management/commands/pullfromcadatahub.py index 32ebba3f41..f84fb0543d 100644 --- a/backend/hct_mis_api/apps/core/management/commands/pullfromcadatahub.py +++ b/backend/hct_mis_api/apps/core/management/commands/pullfromcadatahub.py @@ -1,6 +1,8 @@ from django.core.management.commands import makemigrations -from hct_mis_api.apps.cash_assist_datahub.tasks.pull_from_datahub import PullFromDatahubTask +from hct_mis_api.apps.cash_assist_datahub.tasks.pull_from_datahub import ( + PullFromDatahubTask, +) class Command(makemigrations.Command): diff --git a/backend/hct_mis_api/apps/core/management/commands/pullfromerpdatahub.py b/backend/hct_mis_api/apps/core/management/commands/pullfromerpdatahub.py index 6dc1498374..8039bb01f6 100644 --- a/backend/hct_mis_api/apps/core/management/commands/pullfromerpdatahub.py +++ b/backend/hct_mis_api/apps/core/management/commands/pullfromerpdatahub.py @@ -1,6 +1,8 @@ from django.core.management.commands import makemigrations -from hct_mis_api.apps.erp_datahub.tasks.pull_from_erp_datahub import PullFromErpDatahubTask +from hct_mis_api.apps.erp_datahub.tasks.pull_from_erp_datahub import ( + PullFromErpDatahubTask, +) class Command(makemigrations.Command): diff --git a/backend/hct_mis_api/apps/core/storage.py b/backend/hct_mis_api/apps/core/storage.py index 59f43412f6..e8e74db9ec 100644 --- a/backend/hct_mis_api/apps/core/storage.py +++ b/backend/hct_mis_api/apps/core/storage.py @@ -1,4 +1,5 @@ from django.conf import settings + from storages.backends.azure_storage import AzureStorage diff --git a/backend/hct_mis_api/apps/core/tests/test_exchange_rates.py b/backend/hct_mis_api/apps/core/tests/test_exchange_rates.py index 
78ffad0d6d..5ccb70f1d8 100644 --- a/backend/hct_mis_api/apps/core/tests/test_exchange_rates.py +++ b/backend/hct_mis_api/apps/core/tests/test_exchange_rates.py @@ -1,26 +1,27 @@ import os +from datetime import datetime, timedelta from decimal import Decimal from unittest import mock -import requests_mock -from datetime import datetime, timedelta - from django.core.management import call_command -from parameterized import parameterized from django.test import TestCase +import requests_mock +from parameterized import parameterized + from hct_mis_api.apps.core.exchange_rates import ExchangeRateAPI, ExchangeRates from hct_mis_api.apps.core.models import BusinessArea +from hct_mis_api.apps.core.tests.test_files.exchange_rates_api_response import ( + EXCHANGE_RATES_API_RESPONSE, +) from hct_mis_api.apps.household.fixtures import create_household from hct_mis_api.apps.payment.fixtures import ( - ServiceProviderFactory, - RealProgramFactory, RealCashPlanFactory, RealPaymentRecordFactory, + RealProgramFactory, + ServiceProviderFactory, ) from hct_mis_api.apps.payment.models import PaymentRecord -from hct_mis_api.apps.core.tests.test_files.exchange_rates_api_response import EXCHANGE_RATES_API_RESPONSE - EXCHANGE_RATES_WITH_HISTORICAL_DATA = { "ROWSET": { diff --git a/backend/hct_mis_api/apps/core/tests/test_flexible_helper_methods.py b/backend/hct_mis_api/apps/core/tests/test_flexible_helper_methods.py index 51aca5dae8..23fc2c79e0 100644 --- a/backend/hct_mis_api/apps/core/tests/test_flexible_helper_methods.py +++ b/backend/hct_mis_api/apps/core/tests/test_flexible_helper_methods.py @@ -1,7 +1,8 @@ -import xlrd from django.conf import settings from django.core.exceptions import ValidationError from django.test import TestCase + +import xlrd from xlrd.sheet import Cell from hct_mis_api.apps.core.flex_fields_importer import FlexibleAttributeImporter diff --git a/backend/hct_mis_api/apps/core/tests/test_flexibles.py b/backend/hct_mis_api/apps/core/tests/test_flexibles.py index f3ca2d2fca..496d599faa 100644 --- a/backend/hct_mis_api/apps/core/tests/test_flexibles.py +++ b/backend/hct_mis_api/apps/core/tests/test_flexibles.py @@ -1,10 +1,15 @@ from django.conf import settings from django.core.exceptions import ValidationError from django.test import TestCase + from xlrd import XLRDError from hct_mis_api.apps.core.flex_fields_importer import FlexibleAttributeImporter -from hct_mis_api.apps.core.models import FlexibleAttribute, FlexibleAttributeChoice, FlexibleAttributeGroup +from hct_mis_api.apps.core.models import ( + FlexibleAttribute, + FlexibleAttributeChoice, + FlexibleAttributeGroup, +) class MockSuperUser: diff --git a/backend/hct_mis_api/apps/core/tests/test_kobo_template_upload.py b/backend/hct_mis_api/apps/core/tests/test_kobo_template_upload.py index be2f708cbf..61926834f2 100644 --- a/backend/hct_mis_api/apps/core/tests/test_kobo_template_upload.py +++ b/backend/hct_mis_api/apps/core/tests/test_kobo_template_upload.py @@ -123,7 +123,7 @@ def test_upload_valid_template(self): class TestKoboErrorHandling(APITestCase): def generate_empty_template(self): with NamedTemporaryFile(mode="w+b") as tmp_file: - tmp_file.write("abcdefg".encode()) + tmp_file.write(b"abcdefg") tmp_file.seek(0) template = XLSXKoboTemplate(file_name="test.xlsx", status=XLSXKoboTemplate.UPLOADED) template.file.save("test.xlsx", tmp_file) diff --git a/backend/hct_mis_api/apps/core/tests/test_setting_types_for_calculate_fields.py b/backend/hct_mis_api/apps/core/tests/test_setting_types_for_calculate_fields.py index 
959b70c2b4..1d0f7c78b6 100644 --- a/backend/hct_mis_api/apps/core/tests/test_setting_types_for_calculate_fields.py +++ b/backend/hct_mis_api/apps/core/tests/test_setting_types_for_calculate_fields.py @@ -1,9 +1,15 @@ from django.conf import settings from django.core.exceptions import ValidationError from django.test import TestCase + from parameterized import parameterized -from hct_mis_api.apps.core.core_fields_attributes import TYPE_STRING, TYPE_INTEGER, TYPE_DECIMAL, TYPE_DATE +from hct_mis_api.apps.core.core_fields_attributes import ( + TYPE_DATE, + TYPE_DECIMAL, + TYPE_INTEGER, + TYPE_STRING, +) from hct_mis_api.apps.core.flex_fields_importer import FlexibleAttributeImporter from hct_mis_api.apps.core.models import FlexibleAttribute diff --git a/backend/hct_mis_api/apps/core/views.py b/backend/hct_mis_api/apps/core/views.py index e1e4f789c7..e8edf8dd3d 100644 --- a/backend/hct_mis_api/apps/core/views.py +++ b/backend/hct_mis_api/apps/core/views.py @@ -2,11 +2,12 @@ from django import forms from django.contrib.auth import logout -from django.contrib.auth.decorators import user_passes_test, login_required +from django.contrib.auth.decorators import login_required, user_passes_test from django.core.exceptions import PermissionDenied from django.core.management import call_command from django.http import HttpResponse -from django.shortcuts import redirect, render, get_object_or_404 +from django.shortcuts import get_object_or_404, redirect, render + from graphene_django.settings import graphene_settings from graphql.utils import schema_printer diff --git a/backend/hct_mis_api/apps/erp_datahub/celery_tasks.py b/backend/hct_mis_api/apps/erp_datahub/celery_tasks.py index 53dcb71178..03b2f27f49 100644 --- a/backend/hct_mis_api/apps/erp_datahub/celery_tasks.py +++ b/backend/hct_mis_api/apps/erp_datahub/celery_tasks.py @@ -10,7 +10,9 @@ def sync_to_mis_datahub_task(): logger.info("sync_to_mis_datahub_task start") try: - from hct_mis_api.apps.erp_datahub.tasks.sync_to_mis_datahub import SyncToMisDatahubTask + from hct_mis_api.apps.erp_datahub.tasks.sync_to_mis_datahub import ( + SyncToMisDatahubTask, + ) SyncToMisDatahubTask().execute() except Exception as e: diff --git a/backend/hct_mis_api/apps/erp_datahub/fixtures.py b/backend/hct_mis_api/apps/erp_datahub/fixtures.py index 630ce1c6b2..dede1557b7 100644 --- a/backend/hct_mis_api/apps/erp_datahub/fixtures.py +++ b/backend/hct_mis_api/apps/erp_datahub/fixtures.py @@ -1,10 +1,10 @@ import factory +from factory import fuzzy from pytz import utc -from factory import fuzzy from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.program.models import CashPlan from hct_mis_api.apps.erp_datahub.models import FundsCommitment +from hct_mis_api.apps.program.models import CashPlan class FundsCommitmentFactory(factory.DjangoModelFactory): diff --git a/backend/hct_mis_api/apps/erp_datahub/tests/test_pull_from_erp_datahub.py b/backend/hct_mis_api/apps/erp_datahub/tests/test_pull_from_erp_datahub.py index 6d01f9dc36..d40150a9d1 100644 --- a/backend/hct_mis_api/apps/erp_datahub/tests/test_pull_from_erp_datahub.py +++ b/backend/hct_mis_api/apps/erp_datahub/tests/test_pull_from_erp_datahub.py @@ -3,15 +3,17 @@ from django.core.management import call_command from django.test import TestCase -from hct_mis_api.apps.erp_datahub.tasks.pull_from_erp_datahub import PullFromErpDatahubTask -from hct_mis_api.apps.program.fixtures import CashPlanFactory -from hct_mis_api.apps.payment.fixtures import PaymentRecordFactory from 
hct_mis_api.apps.erp_datahub.fixtures import FundsCommitmentFactory +from hct_mis_api.apps.erp_datahub.tasks.pull_from_erp_datahub import ( + PullFromErpDatahubTask, +) from hct_mis_api.apps.household.fixtures import create_household +from hct_mis_api.apps.payment.fixtures import PaymentRecordFactory +from hct_mis_api.apps.program.fixtures import CashPlanFactory class TestPullDataFromErpDatahub(TestCase): - multi_db = True + databases = "__all__" cash_plan_1 = None cash_plan_2 = None payment_record_1 = None diff --git a/backend/hct_mis_api/apps/erp_datahub/tests/test_sync_to_mis_datahub.py b/backend/hct_mis_api/apps/erp_datahub/tests/test_sync_to_mis_datahub.py index 7fd98ba18c..1be21e7980 100644 --- a/backend/hct_mis_api/apps/erp_datahub/tests/test_sync_to_mis_datahub.py +++ b/backend/hct_mis_api/apps/erp_datahub/tests/test_sync_to_mis_datahub.py @@ -11,7 +11,7 @@ class TestSyncToMisDatahubTask(TestCase): - multi_db = True + databases = "__all__" bosnia_and_herzegovina = None bosnia = None herzegovina = None diff --git a/backend/hct_mis_api/apps/geo/utils.py b/backend/hct_mis_api/apps/geo/utils.py index 59df098142..9624400908 100644 --- a/backend/hct_mis_api/apps/geo/utils.py +++ b/backend/hct_mis_api/apps/geo/utils.py @@ -104,11 +104,11 @@ def initialise_countries(): def copy_admin_area_data(): areas = {} from django.db import models + from hct_mis_api.apps.grievance.models import GrievanceTicket from hct_mis_api.apps.household.models import Household from hct_mis_api.apps.program.models import Program - from hct_mis_api.apps.reporting.models import DashboardReport - from hct_mis_api.apps.reporting.models import Report + from hct_mis_api.apps.reporting.models import DashboardReport, Report models_to_update = ( GrievanceTicket, @@ -140,14 +140,15 @@ def copy_admin_area_data(): def copy_country_data(): from django.db import models - from hct_mis_api.apps.sanction_list.models import SanctionListIndividual - from hct_mis_api.apps.sanction_list.models import SanctionListIndividualDocument - from hct_mis_api.apps.sanction_list.models import SanctionListIndividualNationalities - from hct_mis_api.apps.sanction_list.models import SanctionListIndividualCountries - from hct_mis_api.apps.household.models import Agency - from hct_mis_api.apps.household.models import DocumentType - from hct_mis_api.apps.household.models import Household + from hct_mis_api.apps.core.models import CountryCodeMap + from hct_mis_api.apps.household.models import Agency, DocumentType, Household + from hct_mis_api.apps.sanction_list.models import ( + SanctionListIndividual, + SanctionListIndividualCountries, + SanctionListIndividualDocument, + SanctionListIndividualNationalities, + ) models_to_update = ( CountryCodeMap, diff --git a/backend/hct_mis_api/apps/grievance/notifications.py b/backend/hct_mis_api/apps/grievance/notifications.py index 78034192d3..8dd88f1bcb 100644 --- a/backend/hct_mis_api/apps/grievance/notifications.py +++ b/backend/hct_mis_api/apps/grievance/notifications.py @@ -5,6 +5,7 @@ from django.conf import settings from django.core.mail import EmailMultiAlternatives from django.template.loader import render_to_string + from constance import config from hct_mis_api.apps.account.models import User, UserRole diff --git a/backend/hct_mis_api/apps/grievance/tasks/deduplicate_and_check_sanctions.py b/backend/hct_mis_api/apps/grievance/tasks/deduplicate_and_check_sanctions.py index 4c57be07c9..8550edf198 100644 --- a/backend/hct_mis_api/apps/grievance/tasks/deduplicate_and_check_sanctions.py +++ 
b/backend/hct_mis_api/apps/grievance/tasks/deduplicate_and_check_sanctions.py @@ -3,7 +3,12 @@ from hct_mis_api.apps.grievance.common import create_needs_adjudication_tickets from hct_mis_api.apps.household.documents import IndividualDocument from hct_mis_api.apps.household.elasticsearch_utils import populate_index -from hct_mis_api.apps.household.models import Individual, DUPLICATE, NEEDS_ADJUDICATION, Document +from hct_mis_api.apps.household.models import ( + DUPLICATE, + NEEDS_ADJUDICATION, + Document, + Individual, +) from hct_mis_api.apps.registration_data.models import RegistrationDataImport from hct_mis_api.apps.registration_datahub.tasks.deduplicate import DeduplicateTask from hct_mis_api.apps.sanction_list.tasks.check_against_sanction_list_pre_merge import ( diff --git a/backend/hct_mis_api/apps/grievance/tests/mutations_extras/test_reassign_roles_on_disable_individual.py b/backend/hct_mis_api/apps/grievance/tests/mutations_extras/test_reassign_roles_on_disable_individual.py index 6a53005f76..f76ca183d0 100644 --- a/backend/hct_mis_api/apps/grievance/tests/mutations_extras/test_reassign_roles_on_disable_individual.py +++ b/backend/hct_mis_api/apps/grievance/tests/mutations_extras/test_reassign_roles_on_disable_individual.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from graphql import GraphQLError from hct_mis_api.apps.core.base_test_case import APITestCase diff --git a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_filter_already_existing_tickets.py b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_filter_already_existing_tickets.py index a007190428..1f2dc35bc1 100644 --- a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_filter_already_existing_tickets.py +++ b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_filter_already_existing_tickets.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestAlreadyExistingFilterTickets::test_filter_existing_tickets_by_household_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_complaint_ticket.py b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_complaint_ticket.py index a74374808c..18810d3037 100644 --- a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_complaint_ticket.py +++ b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_complaint_ticket.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestGrievanceCreateComplaintTicketQuery::test_create_complaint_ticket_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_sensitive_ticket.py b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_sensitive_ticket.py index 4629e57600..19b0e76ed9 100644 --- a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_sensitive_ticket.py +++ b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_grievance_create_sensitive_ticket.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestGrievanceCreateSensitiveTicketQuery::test_create_sensitive_ticket_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_ticket_notes.py b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_ticket_notes.py index 1d17fcfa93..ab234c4dd0 100644 --- 
a/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_ticket_notes.py +++ b/backend/hct_mis_api/apps/grievance/tests/snapshots/snap_test_ticket_notes.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestTicketNotes::test_ticket_notes_query_all 1'] = { diff --git a/backend/hct_mis_api/apps/grievance/tests/test_filter_already_existing_tickets.py b/backend/hct_mis_api/apps/grievance/tests/test_filter_already_existing_tickets.py index 11a4e50733..734bb5c510 100644 --- a/backend/hct_mis_api/apps/grievance/tests/test_filter_already_existing_tickets.py +++ b/backend/hct_mis_api/apps/grievance/tests/test_filter_already_existing_tickets.py @@ -1,21 +1,22 @@ from django.core.management import call_command + from parameterized import parameterized -from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.account.fixtures import UserFactory +from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.core.fixtures import AdminAreaLevelFactory, AdminAreaFactory +from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.grievance.fixtures import ( - GrievanceTicketFactory, GrievanceComplaintTicketFactory, - SensitiveGrievanceTicketWithoutExtrasFactory, + GrievanceTicketFactory, SensitiveGrievanceTicketFactory, + SensitiveGrievanceTicketWithoutExtrasFactory, ) from hct_mis_api.apps.grievance.models import GrievanceTicket from hct_mis_api.apps.household.fixtures import create_household from hct_mis_api.apps.payment.fixtures import PaymentRecordFactory -from hct_mis_api.apps.program.fixtures import ProgramFactory, CashPlanFactory +from hct_mis_api.apps.program.fixtures import CashPlanFactory, ProgramFactory class TestAlreadyExistingFilterTickets(APITestCase): diff --git a/backend/hct_mis_api/apps/grievance/tests/test_grievance_data_change_mutations.py b/backend/hct_mis_api/apps/grievance/tests/test_grievance_data_change_mutations.py index d764ea068a..391658cc1e 100644 --- a/backend/hct_mis_api/apps/grievance/tests/test_grievance_data_change_mutations.py +++ b/backend/hct_mis_api/apps/grievance/tests/test_grievance_data_change_mutations.py @@ -232,7 +232,7 @@ def test_grievance_create_individual_data_change(self, _, permissions): "type": IDENTIFICATION_TYPE_NATIONAL_ID, "country": "POL", "number": "123-123-UX-321", - "photo": SimpleUploadedFile(name="test.jpg", content="".encode("utf-8")), + "photo": SimpleUploadedFile(name="test.jpg", content=b""), } ], "identities": [ @@ -291,7 +291,7 @@ def test_grievance_update_individual_data_change(self, _, permissions): "type": IDENTIFICATION_TYPE_NATIONAL_PASSPORT, "country": "POL", "number": "321-321-XU-987", - "photo": SimpleUploadedFile(name="test.jpg", content="".encode("utf-8")), + "photo": SimpleUploadedFile(name="test.jpg", content=b""), } ], "documentsToEdit": [ @@ -300,7 +300,7 @@ def test_grievance_update_individual_data_change(self, _, permissions): "type": IDENTIFICATION_TYPE_NATIONAL_ID, "country": "POL", "number": "321-321-XU-123", - "photo": SimpleUploadedFile(name="test.jpg", content="".encode("utf-8")), + "photo": SimpleUploadedFile(name="test.jpg", content=b""), } ], "identities": [ diff --git a/backend/hct_mis_api/apps/grievance/tests/test_ticket_notes.py b/backend/hct_mis_api/apps/grievance/tests/test_ticket_notes.py index ef8017164d..b4965d33fd 100644 --- 
a/backend/hct_mis_api/apps/grievance/tests/test_ticket_notes.py +++ b/backend/hct_mis_api/apps/grievance/tests/test_ticket_notes.py @@ -1,12 +1,16 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.core.fixtures import AdminAreaLevelFactory, AdminAreaFactory +from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.grievance.fixtures import TicketNoteFactory, GrievanceTicketFactory +from hct_mis_api.apps.grievance.fixtures import ( + GrievanceTicketFactory, + TicketNoteFactory, +) class TestTicketNotes(APITestCase): diff --git a/backend/hct_mis_api/apps/grievance/tests/test_update_grievance_tickets.py b/backend/hct_mis_api/apps/grievance/tests/test_update_grievance_tickets.py index 1e92170e76..0db5eb22bd 100644 --- a/backend/hct_mis_api/apps/grievance/tests/test_update_grievance_tickets.py +++ b/backend/hct_mis_api/apps/grievance/tests/test_update_grievance_tickets.py @@ -298,7 +298,7 @@ def test_update_add_individual(self, name, permissions): "type": IDENTIFICATION_TYPE_NATIONAL_ID, "country": "USA", "number": "321-321-UX-321", - "photo": SimpleUploadedFile(name="test.jpg", content="".encode("utf-8")), + "photo": SimpleUploadedFile(name="test.jpg", content=b""), } ], "identities": [ @@ -410,7 +410,7 @@ def test_update_change_individual(self, name, permissions): "country": "POL", "type": IDENTIFICATION_TYPE_NATIONAL_ID, "number": "111-222-777", - "photo": SimpleUploadedFile(name="test.jpg", content="".encode("utf-8")), + "photo": SimpleUploadedFile(name="test.jpg", content=b""), }, ], "documentsToRemove": [], diff --git a/backend/hct_mis_api/apps/grievance/validators.py b/backend/hct_mis_api/apps/grievance/validators.py index e2d6e27fe8..8e1e66be34 100644 --- a/backend/hct_mis_api/apps/grievance/validators.py +++ b/backend/hct_mis_api/apps/grievance/validators.py @@ -1,4 +1,5 @@ import logging + from graphql import GraphQLError logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/household/elasticsearch_utils.py b/backend/hct_mis_api/apps/household/elasticsearch_utils.py index 1985d5c8f2..9fd5a9849a 100644 --- a/backend/hct_mis_api/apps/household/elasticsearch_utils.py +++ b/backend/hct_mis_api/apps/household/elasticsearch_utils.py @@ -1,6 +1,7 @@ import logging from django.core.management import CommandError + from django_elasticsearch_dsl.registries import registry from elasticsearch_dsl import Search diff --git a/backend/hct_mis_api/apps/household/migrations/0044_migration.py b/backend/hct_mis_api/apps/household/migrations/0044_migration.py index 000882f9c5..d8f379a800 100644 --- a/backend/hct_mis_api/apps/household/migrations/0044_migration.py +++ b/backend/hct_mis_api/apps/household/migrations/0044_migration.py @@ -13,12 +13,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='household', name='child_hoh', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='household', name='consent', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='household', @@ -28,7 +28,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='household', name='fchild_hoh', - 
field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='household', @@ -43,7 +43,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='household', name='returnee', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='household', @@ -63,17 +63,17 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='individual', name='administration_of_rutf', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='individual', name='enrolled_in_nutrition_programme', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='individual', name='pregnant', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.DeleteModel( name='HouseholdIdentity', diff --git a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py index d6ee9a1a51..1cb717e459 100644 --- a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py +++ b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestHouseholdQuery::test_household_query_all_0_all_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_individual_query.py b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_individual_query.py index 811f90bccd..23f39e73cf 100644 --- a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_individual_query.py +++ b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_individual_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestIndividualQuery::test_individual_programme_filter_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/household/tests/test_document_type_model.py b/backend/hct_mis_api/apps/household/tests/test_document_type_model.py index f95255e283..847d65035a 100644 --- a/backend/hct_mis_api/apps/household/tests/test_document_type_model.py +++ b/backend/hct_mis_api/apps/household/tests/test_document_type_model.py @@ -9,7 +9,7 @@ class TestDocumentTypeModel(TestCase): - multi_db = True + databases = "__all__" def test_create_document_type_with_specific_country(self): document_type = DocumentType.objects.create( diff --git a/backend/hct_mis_api/apps/household/tests/test_household_query.py b/backend/hct_mis_api/apps/household/tests/test_household_query.py index aa90166a7e..e200899ad5 100644 --- a/backend/hct_mis_api/apps/household/tests/test_household_query.py +++ b/backend/hct_mis_api/apps/household/tests/test_household_query.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/household/tests/test_individual_query.py b/backend/hct_mis_api/apps/household/tests/test_individual_query.py index ebd28d24ce..d96f80deff 100644 --- a/backend/hct_mis_api/apps/household/tests/test_individual_query.py +++ b/backend/hct_mis_api/apps/household/tests/test_individual_query.py @@ -1,16 +1,14 @@ import unittest from django.core.management import call_command + from parameterized import parameterized from 
hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.household.fixtures import ( - IndividualFactory, - HouseholdFactory, -) +from hct_mis_api.apps.household.fixtures import HouseholdFactory, IndividualFactory from hct_mis_api.apps.program.fixtures import ProgramFactory diff --git a/backend/hct_mis_api/apps/household/tests/test_individual_xlsx_update.py b/backend/hct_mis_api/apps/household/tests/test_individual_xlsx_update.py index aa4b466593..5ab7049f73 100644 --- a/backend/hct_mis_api/apps/household/tests/test_individual_xlsx_update.py +++ b/backend/hct_mis_api/apps/household/tests/test_individual_xlsx_update.py @@ -45,7 +45,7 @@ def invalid_file(): class TestIndividualXlsxUpdate(APITestCase): - multi_db = True + databases = "__all__" def setUp(self) -> None: call_command("loadbusinessareas") diff --git a/backend/hct_mis_api/apps/mis_datahub/migrations/0028_migration.py b/backend/hct_mis_api/apps/mis_datahub/migrations/0028_migration.py index efcce4925c..f7817a4d59 100644 --- a/backend/hct_mis_api/apps/mis_datahub/migrations/0028_migration.py +++ b/backend/hct_mis_api/apps/mis_datahub/migrations/0028_migration.py @@ -13,6 +13,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='individual', name='pregnant', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), ] diff --git a/backend/hct_mis_api/apps/mis_datahub/tests/test_send_tp_to_datahub.py b/backend/hct_mis_api/apps/mis_datahub/tests/test_send_tp_to_datahub.py index f658e99710..5b07c33953 100644 --- a/backend/hct_mis_api/apps/mis_datahub/tests/test_send_tp_to_datahub.py +++ b/backend/hct_mis_api/apps/mis_datahub/tests/test_send_tp_to_datahub.py @@ -32,7 +32,7 @@ class TestSendTpToDatahub(TestCase): - multi_db = True + databases = "__all__" @staticmethod def _pre_test_commands(): diff --git a/backend/hct_mis_api/apps/payment/celery_tasks.py b/backend/hct_mis_api/apps/payment/celery_tasks.py index 529e8edc9e..01e35850e3 100644 --- a/backend/hct_mis_api/apps/payment/celery_tasks.py +++ b/backend/hct_mis_api/apps/payment/celery_tasks.py @@ -10,7 +10,9 @@ def get_sync_run_rapid_pro_task(): logger.info(f"get_sync_run_rapid_pro_task start") try: - from hct_mis_api.apps.payment.tasks.CheckRapidProVerificationTask import CheckRapidProVerificationTask + from hct_mis_api.apps.payment.tasks.CheckRapidProVerificationTask import ( + CheckRapidProVerificationTask, + ) CheckRapidProVerificationTask().execute() except Exception as e: diff --git a/backend/hct_mis_api/apps/payment/fixtures.py b/backend/hct_mis_api/apps/payment/fixtures.py index 465700d1a7..064d2b8944 100644 --- a/backend/hct_mis_api/apps/payment/fixtures.py +++ b/backend/hct_mis_api/apps/payment/fixtures.py @@ -11,10 +11,10 @@ from hct_mis_api.apps.household.fixtures import HouseholdFactory from hct_mis_api.apps.household.models import Household from hct_mis_api.apps.payment.models import ( - PaymentRecord, - ServiceProvider, CashPlanPaymentVerification, + PaymentRecord, PaymentVerification, + ServiceProvider, ) from hct_mis_api.apps.program.fixtures import CashPlanFactory from hct_mis_api.apps.program.models import CashPlan, Program diff --git a/backend/hct_mis_api/apps/payment/rapid_pro/api.py b/backend/hct_mis_api/apps/payment/rapid_pro/api.py index 25097c2d64..0bf4a08387 100644 --- a/backend/hct_mis_api/apps/payment/rapid_pro/api.py +++ 
b/backend/hct_mis_api/apps/payment/rapid_pro/api.py @@ -1,11 +1,12 @@ import logging from decimal import Decimal -import requests -from constance import config from django.conf import settings from django.core.exceptions import ValidationError +import requests +from constance import config + from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.household.models import Individual diff --git a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py index 70f862e5c5..9ba57d067d 100644 --- a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py +++ b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestChartTotalTransferredCashByCountry::test_resolving_chart_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_discard_verification_mutation.py b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_discard_verification_mutation.py index fc3eb29feb..ca916b65c9 100644 --- a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_discard_verification_mutation.py +++ b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_discard_verification_mutation.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestDiscardVerificationMutation::test_discard_active_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/payment/tests/test_chart_total_transferred_cash_by_country.py b/backend/hct_mis_api/apps/payment/tests/test_chart_total_transferred_cash_by_country.py index a002501952..365de2a0e7 100644 --- a/backend/hct_mis_api/apps/payment/tests/test_chart_total_transferred_cash_by_country.py +++ b/backend/hct_mis_api/apps/payment/tests/test_chart_total_transferred_cash_by_country.py @@ -1,6 +1,7 @@ from datetime import datetime from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/payment/tests/test_discard_verification_mutation.py b/backend/hct_mis_api/apps/payment/tests/test_discard_verification_mutation.py index bb809d3f6a..253bc33c78 100644 --- a/backend/hct_mis_api/apps/payment/tests/test_discard_verification_mutation.py +++ b/backend/hct_mis_api/apps/payment/tests/test_discard_verification_mutation.py @@ -1,23 +1,27 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.core.models import BusinessArea, AdminArea -from hct_mis_api.apps.household.fixtures import ( - create_household, - EntitlementCardFactory, -) +from hct_mis_api.apps.core.models import AdminArea, BusinessArea +from hct_mis_api.apps.household.fixtures import EntitlementCardFactory, create_household from hct_mis_api.apps.payment.fixtures import ( - PaymentRecordFactory, CashPlanPaymentVerificationFactory, + PaymentRecordFactory, PaymentVerificationFactory, ) -from hct_mis_api.apps.payment.models import PaymentVerification, CashPlanPaymentVerification -from hct_mis_api.apps.program.fixtures import ProgramFactory, 
CashPlanFactory +from hct_mis_api.apps.payment.models import ( + CashPlanPaymentVerification, + PaymentVerification, +) +from hct_mis_api.apps.program.fixtures import CashPlanFactory, ProgramFactory from hct_mis_api.apps.registration_data.fixtures import RegistrationDataImportFactory -from hct_mis_api.apps.targeting.fixtures import TargetingCriteriaFactory, TargetPopulationFactory +from hct_mis_api.apps.targeting.fixtures import ( + TargetingCriteriaFactory, + TargetPopulationFactory, +) class TestDiscardVerificationMutation(APITestCase): diff --git a/backend/hct_mis_api/apps/payment/views.py b/backend/hct_mis_api/apps/payment/views.py index 1f717ae1f8..c7f7691f42 100644 --- a/backend/hct_mis_api/apps/payment/views.py +++ b/backend/hct_mis_api/apps/payment/views.py @@ -4,12 +4,15 @@ from django.core.exceptions import PermissionDenied from django.http import HttpResponse from django.shortcuts import get_object_or_404 + from openpyxl.writer.excel import save_virtual_workbook from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.utils import decode_id_string from hct_mis_api.apps.payment.models import CashPlanPaymentVerification -from hct_mis_api.apps.payment.xlsx.XlsxVerificationExportService import XlsxVerificationExportService +from hct_mis_api.apps.payment.xlsx.XlsxVerificationExportService import ( + XlsxVerificationExportService, +) logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/payment/xlsx/XlsxVerificationImportService.py b/backend/hct_mis_api/apps/payment/xlsx/XlsxVerificationImportService.py index d966ce60f4..c79482f86a 100644 --- a/backend/hct_mis_api/apps/payment/xlsx/XlsxVerificationImportService.py +++ b/backend/hct_mis_api/apps/payment/xlsx/XlsxVerificationImportService.py @@ -4,8 +4,13 @@ from graphql import GraphQLError from hct_mis_api.apps.payment.models import PaymentVerification -from hct_mis_api.apps.payment.utils import from_received_yes_no_to_status, float_to_decimal -from hct_mis_api.apps.payment.xlsx.XlsxVerificationExportService import XlsxVerificationExportService +from hct_mis_api.apps.payment.utils import ( + float_to_decimal, + from_received_yes_no_to_status, +) +from hct_mis_api.apps.payment.xlsx.XlsxVerificationExportService import ( + XlsxVerificationExportService, +) class XlsxVerificationImportService: diff --git a/backend/hct_mis_api/apps/power_query/mixin.py b/backend/hct_mis_api/apps/power_query/mixin.py index 4662f477ae..6791191344 100644 --- a/backend/hct_mis_api/apps/power_query/mixin.py +++ b/backend/hct_mis_api/apps/power_query/mixin.py @@ -3,16 +3,15 @@ from admin_extra_urls.decorators import button from admin_extra_urls.mixins import ExtraUrlMixin + from hct_mis_api.apps.power_query.models import Query class PowerQueryMixin(ExtraUrlMixin): - @button(permission='power_query_query_add') + @button(permission="power_query_query_add") def power_query(self, request): ct = ContentType.objects.get_for_model(self.model) - context = self.get_common_context(request, - title='Power Queries', - ct=ct, - entries=Query.objects.filter(target=ct) - ) - return render(request, 'power_query/list.html', context) + context = self.get_common_context( + request, title="Power Queries", ct=ct, entries=Query.objects.filter(target=ct) + ) + return render(request, "power_query/list.html", context) diff --git a/backend/hct_mis_api/apps/power_query/utils.py b/backend/hct_mis_api/apps/power_query/utils.py index cfe87329fd..098dcdc7b6 100644 --- a/backend/hct_mis_api/apps/power_query/utils.py +++ 
b/backend/hct_mis_api/apps/power_query/utils.py @@ -60,7 +60,7 @@ def wrap(request, *args, **kwargs): return view(request, *args, **kwargs) if "HTTP_AUTHORIZATION" in request.META: - auth = request.META["HTTP_AUTHORIZATION"].split() + auth = request.headers["Authorization"].split() if len(auth) == 2: if auth[0].lower() == "basic": uname, passwd = base64.b64decode(auth[1].encode()).decode().split(":") diff --git a/backend/hct_mis_api/apps/power_query/views.py b/backend/hct_mis_api/apps/power_query/views.py index da38e43590..e44b5459aa 100644 --- a/backend/hct_mis_api/apps/power_query/views.py +++ b/backend/hct_mis_api/apps/power_query/views.py @@ -27,7 +27,7 @@ def fetch(request, pk): report: Report = get_object_or_404(Report, pk=pk) if request.user.is_superuser or report.available_to.filter(pk=request.user.pk): if report.result is None: - content_types = request.META.get("HTTP_ACCEPT", "*/*").split(",") + content_types = request.headers.get("Accept", "*/*").split(",") if "text/html" in content_types: return HttpResponse("This report is not currently available", status=400) elif "application/json" in content_types: diff --git a/backend/hct_mis_api/apps/program/fixtures.py b/backend/hct_mis_api/apps/program/fixtures.py index 88523c209d..7343def621 100644 --- a/backend/hct_mis_api/apps/program/fixtures.py +++ b/backend/hct_mis_api/apps/program/fixtures.py @@ -8,7 +8,7 @@ from hct_mis_api.apps.core.fixtures import AdminAreaFactory from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.payment.models import PaymentRecord -from hct_mis_api.apps.program.models import Program, CashPlan +from hct_mis_api.apps.program.models import CashPlan, Program class ProgramFactory(factory.DjangoModelFactory): diff --git a/backend/hct_mis_api/apps/program/mutations.py b/backend/hct_mis_api/apps/program/mutations.py index a50ee69b8b..36a4912863 100644 --- a/backend/hct_mis_api/apps/program/mutations.py +++ b/backend/hct_mis_api/apps/program/mutations.py @@ -1,19 +1,23 @@ -import graphene from django.core.exceptions import ValidationError from django.db import transaction +import graphene + from hct_mis_api.apps.account.permissions import PermissionMutation, Permissions from hct_mis_api.apps.activity_log.models import log_create from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.core.permissions import is_authenticated -from hct_mis_api.apps.core.utils import decode_id_string, check_concurrency_version_in_mutation from hct_mis_api.apps.core.scalars import BigInt +from hct_mis_api.apps.core.utils import ( + check_concurrency_version_in_mutation, + decode_id_string, +) from hct_mis_api.apps.core.validators import CommonValidator from hct_mis_api.apps.program.models import Program from hct_mis_api.apps.program.schema import ProgramNode from hct_mis_api.apps.program.validators import ( - ProgramValidator, ProgramDeletionValidator, + ProgramValidator, ) from hct_mis_api.apps.utils.mutations import ValidationErrorMutationMixin diff --git a/backend/hct_mis_api/apps/program/schema.py b/backend/hct_mis_api/apps/program/schema.py index 74e6327893..fe31c35dda 100644 --- a/backend/hct_mis_api/apps/program/schema.py +++ b/backend/hct_mis_api/apps/program/schema.py @@ -1,6 +1,7 @@ -import graphene -from django.db.models import Case, IntegerField, Q, Sum, Value, When, Count +from django.db.models import Case, Count, IntegerField, Q, Sum, Value, When from django.db.models.functions import Coalesce, Lower + +import graphene from django_filters import ( CharFilter, DateFilter, @@ 
-12,22 +13,22 @@ from graphene_django import DjangoObjectType from hct_mis_api.apps.account.permissions import ( + ALL_GRIEVANCES_CREATE_MODIFY, BaseNodePermissionMixin, DjangoPermissionFilterConnectionField, - hopePermissionClass, Permissions, hopeOneOfPermissionClass, - ALL_GRIEVANCES_CREATE_MODIFY, + hopePermissionClass, ) from hct_mis_api.apps.core.extended_connection import ExtendedConnection from hct_mis_api.apps.core.filters import DecimalRangeFilter, IntegerRangeFilter from hct_mis_api.apps.core.schema import ChoiceObject from hct_mis_api.apps.core.utils import ( - to_choice_object, CustomOrderingFilter, + chart_filters_decoder, chart_map_choices, chart_permission_decorator, - chart_filters_decoder, + to_choice_object, ) from hct_mis_api.apps.payment.models import CashPlanPaymentVerification, PaymentRecord from hct_mis_api.apps.payment.utils import get_payment_records_for_dashboard diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_choices.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_choices.py index b96cc7cb57..ae849085c2 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_choices.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_choices.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestCashPlanChoices::test_status_choices_query 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py index 6586924512..90a20b6eec 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestCashPlanQueries::test_cash_plans_0_all_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_change_program_status.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_change_program_status.py index 6a9046be22..89fb34b30a 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_change_program_status.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_change_program_status.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestChangeProgramStatus::test_status_change_0_draft_to_active_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_create_program.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_create_program.py index 0f6bf0ac92..ba47d4d5d2 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_create_program.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_create_program.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestCreateProgram::test_create_program_not_authenticated 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_delete_program.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_delete_program.py index b13b855bec..1833ae9b97 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_delete_program.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_delete_program.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() 
snapshots['TestDeleteProgram::test_delete_program_not_authenticated 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_program_choices.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_program_choices.py index 0e1903b5ee..51163814f1 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_program_choices.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_program_choices.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestProgramChoices::test_program_frequency_of_payments_choices 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_update_program.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_update_program.py index 26f2e8bb5e..929e924272 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_update_program.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_update_program.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestUpdateProgram::test_update_program_not_authenticated 1'] = { diff --git a/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py b/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py index 371fb95147..86814241d5 100644 --- a/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py +++ b/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py @@ -1,13 +1,14 @@ from datetime import datetime -from parameterized import parameterized from django.core.management import call_command +from parameterized import parameterized + from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions -from hct_mis_api.apps.payment.models import PaymentRecord from hct_mis_api.apps.core.base_test_case import APITestCase from hct_mis_api.apps.core.models import BusinessArea +from hct_mis_api.apps.payment.models import PaymentRecord from hct_mis_api.apps.program.fixtures import CashPlanFactory, ProgramFactory QUERY_SINGLE_CASH_PLAN = """ diff --git a/backend/hct_mis_api/apps/program/tests/test_change_program_status.py b/backend/hct_mis_api/apps/program/tests/test_change_program_status.py index b77933d593..da5b967004 100644 --- a/backend/hct_mis_api/apps/program/tests/test_change_program_status.py +++ b/backend/hct_mis_api/apps/program/tests/test_change_program_status.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/program/tests/test_create_program.py b/backend/hct_mis_api/apps/program/tests/test_create_program.py index 270c51ae2b..448f36b519 100644 --- a/backend/hct_mis_api/apps/program/tests/test_create_program.py +++ b/backend/hct_mis_api/apps/program/tests/test_create_program.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/program/tests/test_delete_program.py b/backend/hct_mis_api/apps/program/tests/test_delete_program.py index a1eb87ba5b..774bc52f18 100644 --- a/backend/hct_mis_api/apps/program/tests/test_delete_program.py +++ b/backend/hct_mis_api/apps/program/tests/test_delete_program.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff 
--git a/backend/hct_mis_api/apps/program/tests/test_update_program.py b/backend/hct_mis_api/apps/program/tests/test_update_program.py index 75f08453b2..a3586db2b7 100644 --- a/backend/hct_mis_api/apps/program/tests/test_update_program.py +++ b/backend/hct_mis_api/apps/program/tests/test_update_program.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/program/validators.py b/backend/hct_mis_api/apps/program/validators.py index 8969b21821..f97bcdc30b 100644 --- a/backend/hct_mis_api/apps/program/validators.py +++ b/backend/hct_mis_api/apps/program/validators.py @@ -1,4 +1,5 @@ import logging + from django.core.exceptions import ValidationError from hct_mis_api.apps.core.validators import BaseValidator diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py index c2d191bfd9..1d4289d39f 100644 --- a/backend/hct_mis_api/apps/registration_data/schema.py +++ b/backend/hct_mis_api/apps/registration_data/schema.py @@ -1,18 +1,24 @@ -import graphene from django.db.models.functions import Lower -from django_filters import FilterSet, DateFilter, CharFilter + +import graphene +from django_filters import CharFilter, DateFilter, FilterSet from graphene_django import DjangoObjectType from hct_mis_api.apps.account.permissions import ( + BaseNodePermissionMixin, DjangoPermissionFilterConnectionField, - hopePermissionClass, Permissions, - BaseNodePermissionMixin, + hopePermissionClass, ) from hct_mis_api.apps.core.extended_connection import ExtendedConnection from hct_mis_api.apps.core.schema import ChoiceObject -from hct_mis_api.apps.core.utils import get_count_and_percentage, CustomOrderingFilter -from hct_mis_api.apps.household.models import DUPLICATE, NEEDS_ADJUDICATION, UNIQUE, DUPLICATE_IN_BATCH +from hct_mis_api.apps.core.utils import CustomOrderingFilter, get_count_and_percentage +from hct_mis_api.apps.household.models import ( + DUPLICATE, + DUPLICATE_IN_BATCH, + NEEDS_ADJUDICATION, + UNIQUE, +) from hct_mis_api.apps.registration_data.models import RegistrationDataImport from hct_mis_api.apps.registration_datahub.models import UNIQUE_IN_BATCH diff --git a/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py b/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py index dca318bcc6..ad72a5464a 100644 --- a/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py +++ b/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestRegistrationDataImportQuery::test_registration_data_import_datahub_query_all_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/registration_data/tests/test_registration_data_import_query.py b/backend/hct_mis_api/apps/registration_data/tests/test_registration_data_import_query.py index ecf4497274..316f392bb7 100644 --- a/backend/hct_mis_api/apps/registration_data/tests/test_registration_data_import_query.py +++ b/backend/hct_mis_api/apps/registration_data/tests/test_registration_data_import_query.py @@ -1,15 +1,16 @@ -from parameterized import parameterized from django.core.management import call_command +from parameterized import parameterized + from 
hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.registration_data.fixtures import RegistrationDataImportFactory from hct_mis_api.apps.core.models import BusinessArea +from hct_mis_api.apps.registration_data.fixtures import RegistrationDataImportFactory class TestRegistrationDataImportQuery(APITestCase): - multi_db = True + databases = "__all__" ALL_REGISTRATION_DATA_IMPORT_DATAHUB_QUERY = """ query AllRegistrationDataImports { diff --git a/backend/hct_mis_api/apps/registration_datahub/migrations/0033_migration.py b/backend/hct_mis_api/apps/registration_datahub/migrations/0033_migration.py index f276684c85..86f2e17e5f 100644 --- a/backend/hct_mis_api/apps/registration_datahub/migrations/0033_migration.py +++ b/backend/hct_mis_api/apps/registration_datahub/migrations/0033_migration.py @@ -13,27 +13,27 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='importedhousehold', name='child_hoh', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='importedhousehold', name='consent', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='importedhousehold', name='fchild_hoh', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='importedhousehold', name='returnee', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.AlterField( model_name='importedindividual', name='pregnant', - field=models.NullBooleanField(), + field=models.BooleanField(null=True), ), migrations.DeleteModel( name='ImportedHouseholdIdentity', diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_household_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_household_query.py index ae4400d6eb..02d24d8027 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_household_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_household_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestImportedHouseholdQuery::test_imported_household_query_all_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_individuals_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_individuals_query.py index 9cfd739c35..325a6640ec 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_individuals_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_imported_individuals_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestImportedIndividualQuery::test_imported_individual_query_0_all_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_mutations.py b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_mutations.py index 23b53da495..1d6df6867b 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_mutations.py +++ 
b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_mutations.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestRegistrationDataImportDatahubMutations::test_registration_data_import_datahub_upload_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_query.py index 1d33cb165a..3a6b7400dd 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/snapshots/snap_test_registration_data_import_datahub_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestRegistrationDataImportDatahubQuery::test_registration_data_import_datahub_query_all 1'] = { diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/tasks/test_mark_submissions.py b/backend/hct_mis_api/apps/registration_datahub/tests/tasks/test_mark_submissions.py index 446ea70d58..f853552b8d 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/tasks/test_mark_submissions.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/tasks/test_mark_submissions.py @@ -22,7 +22,7 @@ class TestMarkSubmissions(TestCase): - multi_db = True + databases = "__all__" def setUp(self) -> None: call_command("loadbusinessareas") diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_deduplication.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_deduplication.py index 7bf0f9fffb..98de6f34df 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_deduplication.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_deduplication.py @@ -29,7 +29,7 @@ class TestBatchDeduplication(BaseElasticSearchTestCase): - multi_db = True + databases = "__all__" @classmethod def setUpTestData(cls): @@ -283,7 +283,7 @@ def test_batch_deduplication(self): class TestGoldenRecordDeduplication(BaseElasticSearchTestCase): - multi_db = True + databases = "__all__" @classmethod def setUpTestData(cls): diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_handling_documents_duplicates.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_handling_documents_duplicates.py index f0b390851d..a855f8d7f6 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_handling_documents_duplicates.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_handling_documents_duplicates.py @@ -8,15 +8,15 @@ DUPLICATE, FEMALE, HEAD, + IDENTIFICATION_TYPE_NATIONAL_ID, MALE, NEEDS_ADJUDICATION, SON_DAUGHTER, UNIQUE, WIFE_HUSBAND, - Individual, - DocumentType, - IDENTIFICATION_TYPE_NATIONAL_ID, Document, + DocumentType, + Individual, ) from hct_mis_api.apps.registration_data.fixtures import RegistrationDataImportFactory from hct_mis_api.apps.registration_datahub.fixtures import ( @@ -33,7 +33,7 @@ class TestGoldenRecordDeduplication(BaseElasticSearchTestCase): - multi_db = True + databases = "__all__" @classmethod def setUpTestData(cls): diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_document_type_models.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_document_type_models.py index f644759aef..27244b5630 100644 --- 
a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_document_type_models.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_document_type_models.py @@ -7,7 +7,7 @@ class TestImportedDocumentTypeModel(TestCase): - multi_db = True + databases = "__all__" def test_create_document_type_with_specific_country(self): document_type = ImportedDocumentType.objects.create( diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_household_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_household_query.py index 2348966daf..98d135bee0 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_household_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_household_query.py @@ -1,15 +1,16 @@ -from parameterized import parameterized from django.core.management import call_command +from parameterized import parameterized + from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.registration_datahub.fixtures import ImportedHouseholdFactory from hct_mis_api.apps.core.models import BusinessArea +from hct_mis_api.apps.registration_datahub.fixtures import ImportedHouseholdFactory class TestImportedHouseholdQuery(APITestCase): - multi_db = True + databases = "__all__" ALL_IMPORTED_HOUSEHOLD_QUERY = """ query AllImportedHouseholds{ diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_individuals_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_individuals_query.py index 25ef8161ad..f943ffeb15 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_individuals_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_imported_individuals_query.py @@ -1,11 +1,12 @@ -from parameterized import parameterized from django.core.management import call_command +from parameterized import parameterized + from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.registration_datahub.fixtures import ImportedIndividualFactory from hct_mis_api.apps.core.models import BusinessArea +from hct_mis_api.apps.registration_datahub.fixtures import ImportedIndividualFactory ALL_IMPORTED_INDIVIDUALS_QUERY = """ query AllImportedIndividuals { @@ -66,7 +67,7 @@ class TestImportedIndividualQuery(APITestCase): - multi_db = True + databases = "__all__" # IMPORTANT! 
# FREEZGUN doesn't work this snapshot have to be updated once a year diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_rdi_create.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_rdi_create.py index a5f1677cd1..2211d7f6cb 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_rdi_create.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_rdi_create.py @@ -57,7 +57,7 @@ def __init__(self, value, coordinate): class TestRdiCreateTask(TestCase): - multi_db = True + databases = "__all__" @classmethod def setUpTestData(cls): @@ -348,7 +348,7 @@ def test_store_row_id(self): class TestRdiKoboCreateTask(TestCase): - multi_db = True + databases = "__all__" @staticmethod def _return_test_image(*args, **kwargs): diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_refuse_rdi_mutation.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_refuse_rdi_mutation.py index dec6c9527b..cc0bca339b 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_refuse_rdi_mutation.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_refuse_rdi_mutation.py @@ -10,7 +10,7 @@ class TestRefuseRdiMutation(APITestCase): - multi_db = True + databases = "__all__" QUERY = """ mutation RefuseRegistrationDataImportMutation( diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_mutations.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_mutations.py index 41f36c021a..5708389d0c 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_mutations.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_mutations.py @@ -1,14 +1,12 @@ import io -from parameterized import parameterized -from PIL import Image from django.conf import settings -from django.core.files.uploadedfile import ( - InMemoryUploadedFile, - SimpleUploadedFile, -) +from django.core.files.uploadedfile import InMemoryUploadedFile, SimpleUploadedFile from django.core.management import call_command +from parameterized import parameterized +from PIL import Image + from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase @@ -17,7 +15,7 @@ class TestRegistrationDataImportDatahubMutations(APITestCase): - multi_db = True + databases = "__all__" UPLOAD_REGISTRATION_DATA_IMPORT_DATAHUB = """ mutation UploadImportDataXLSXFile( diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_query.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_query.py index 06e81ee29a..4322424710 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_query.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_registration_data_import_datahub_query.py @@ -1,10 +1,12 @@ from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.core.base_test_case import APITestCase -from hct_mis_api.apps.registration_datahub.fixtures import RegistrationDataImportDatahubFactory +from hct_mis_api.apps.registration_datahub.fixtures import ( + RegistrationDataImportDatahubFactory, +) class TestRegistrationDataImportDatahubQuery(APITestCase): - multi_db = True + databases = "__all__" ALL_REGISTRATION_DATA_IMPORT_DATAHUB_QUERY = """ query 
AllRegistrationDataImportsDatahub { diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_template_file_generator.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_template_file_generator.py index 1f25ce4c9b..6e4f44bc03 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_template_file_generator.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_template_file_generator.py @@ -1,6 +1,8 @@ from unittest import TestCase, mock -from hct_mis_api.apps.registration_datahub.template_generator import TemplateFileGenerator +from hct_mis_api.apps.registration_datahub.template_generator import ( + TemplateFileGenerator, +) class TestTemplateFileGenerator(TestCase): diff --git a/backend/hct_mis_api/apps/registration_datahub/views.py b/backend/hct_mis_api/apps/registration_datahub/views.py index 89f5693d46..b968829885 100644 --- a/backend/hct_mis_api/apps/registration_datahub/views.py +++ b/backend/hct_mis_api/apps/registration_datahub/views.py @@ -1,7 +1,10 @@ from django.http import HttpResponse + from openpyxl.writer.excel import save_virtual_workbook -from hct_mis_api.apps.registration_datahub.template_generator import TemplateFileGenerator +from hct_mis_api.apps.registration_datahub.template_generator import ( + TemplateFileGenerator, +) def download_template(request): diff --git a/backend/hct_mis_api/apps/reporting/admin.py b/backend/hct_mis_api/apps/reporting/admin.py index 6a44c66275..3f8e333b8f 100644 --- a/backend/hct_mis_api/apps/reporting/admin.py +++ b/backend/hct_mis_api/apps/reporting/admin.py @@ -1,6 +1,6 @@ from django.contrib import admin -from .models import Report, DashboardReport +from .models import DashboardReport, Report @admin.register(Report) diff --git a/backend/hct_mis_api/apps/reporting/fixtures.py b/backend/hct_mis_api/apps/reporting/fixtures.py index 4f417bccc3..a2c343e1fa 100644 --- a/backend/hct_mis_api/apps/reporting/fixtures.py +++ b/backend/hct_mis_api/apps/reporting/fixtures.py @@ -5,8 +5,8 @@ from factory import fuzzy from pytz import utc -from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.account.fixtures import UserFactory +from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.reporting.models import Report diff --git a/backend/hct_mis_api/apps/reporting/schema.py b/backend/hct_mis_api/apps/reporting/schema.py index 461f2511d6..bc07b55869 100644 --- a/backend/hct_mis_api/apps/reporting/schema.py +++ b/backend/hct_mis_api/apps/reporting/schema.py @@ -1,22 +1,30 @@ +from datetime import datetime + +from django.db.models.functions import ExtractYear + import graphene +from django_filters import ( + CharFilter, + DateTimeFilter, + FilterSet, + MultipleChoiceFilter, + OrderingFilter, +) from graphene import relay from graphene_django import DjangoObjectType -from django.db.models.functions import ExtractYear -from django_filters import CharFilter, FilterSet, MultipleChoiceFilter, OrderingFilter, DateTimeFilter -from datetime import datetime from hct_mis_api.apps.account.permissions import ( BaseNodePermissionMixin, - hopePermissionClass, - Permissions, DjangoPermissionFilterConnectionField, + Permissions, + hopePermissionClass, ) from hct_mis_api.apps.core.extended_connection import ExtendedConnection from hct_mis_api.apps.core.schema import ChoiceObject from hct_mis_api.apps.core.utils import to_choice_object -from hct_mis_api.apps.reporting.models import Report, DashboardReport -from hct_mis_api.apps.payment.models import PaymentRecord from 
hct_mis_api.apps.grievance.models import GrievanceTicket +from hct_mis_api.apps.payment.models import PaymentRecord +from hct_mis_api.apps.reporting.models import DashboardReport, Report class ReportFilter(FilterSet): diff --git a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_choices.py b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_choices.py index d921420ca9..c389da782b 100644 --- a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_choices.py +++ b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_choices.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestProgramChoices::test_report_types_choices 1'] = { diff --git a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_mutations.py b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_mutations.py index 63094b2cb6..71f78749e9 100644 --- a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_mutations.py +++ b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reporting_mutations.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestReportingMutation::test_create_report_with_no_extra_filters_0_with_permission_individuals_report_with_earlier_dateTo 1'] = { diff --git a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reports_query.py b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reports_query.py index 46de720d45..9d0f56de03 100644 --- a/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reports_query.py +++ b/backend/hct_mis_api/apps/reporting/tests/snapshots/snap_test_reports_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestReportsQuery::test_reports_query_all_0_all_with_permissions 1'] = { diff --git a/backend/hct_mis_api/apps/reporting/tests/test_report_service.py b/backend/hct_mis_api/apps/reporting/tests/test_report_service.py index 218caadb10..9bdbf4e79d 100644 --- a/backend/hct_mis_api/apps/reporting/tests/test_report_service.py +++ b/backend/hct_mis_api/apps/reporting/tests/test_report_service.py @@ -1,26 +1,29 @@ -from parameterized import parameterized from django.core.management import call_command from django.test import TestCase -from hct_mis_api.apps.core.models import BusinessArea +from parameterized import parameterized + from hct_mis_api.apps.account.fixtures import UserFactory -from hct_mis_api.apps.reporting.fixtures import ReportFactory -from hct_mis_api.apps.reporting.models import Report +from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory +from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.household.fixtures import create_household_and_individuals -from hct_mis_api.apps.core.fixtures import AdminAreaLevelFactory, AdminAreaFactory -from hct_mis_api.apps.program.fixtures import CashPlanFactory, ProgramFactory from hct_mis_api.apps.payment.fixtures import ( CashPlanPaymentVerificationFactory, PaymentRecordFactory, PaymentVerificationFactory, ) +from hct_mis_api.apps.program.fixtures import CashPlanFactory, ProgramFactory +from hct_mis_api.apps.reporting.fixtures import ReportFactory +from hct_mis_api.apps.reporting.models import Report class TestGenerateReportService(TestCase): @classmethod def setUpTestData(self): call_command("loadbusinessareas") - from hct_mis_api.apps.reporting.generate_report_service import GenerateReportService + from 
hct_mis_api.apps.reporting.generate_report_service import ( + GenerateReportService, + ) self.GenerateReportService = GenerateReportService diff --git a/backend/hct_mis_api/apps/reporting/tests/test_reporting_mutations.py b/backend/hct_mis_api/apps/reporting/tests/test_reporting_mutations.py index 9a5d90edac..94baab937c 100644 --- a/backend/hct_mis_api/apps/reporting/tests/test_reporting_mutations.py +++ b/backend/hct_mis_api/apps/reporting/tests/test_reporting_mutations.py @@ -1,16 +1,17 @@ -from parameterized import parameterized from django.core.management import call_command +from parameterized import parameterized + from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase +from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.household.fixtures import create_household_and_individuals -from hct_mis_api.apps.reporting.validators import ReportValidator -from hct_mis_api.apps.reporting.models import Report -from hct_mis_api.apps.core.fixtures import AdminAreaLevelFactory, AdminAreaFactory from hct_mis_api.apps.core.utils import encode_id_base64 +from hct_mis_api.apps.household.fixtures import create_household_and_individuals from hct_mis_api.apps.program.fixtures import ProgramFactory +from hct_mis_api.apps.reporting.models import Report +from hct_mis_api.apps.reporting.validators import ReportValidator class TestReportingMutation(APITestCase): diff --git a/backend/hct_mis_api/apps/reporting/tests/test_reports_query.py b/backend/hct_mis_api/apps/reporting/tests/test_reports_query.py index 0f83bde6bc..a529eae413 100644 --- a/backend/hct_mis_api/apps/reporting/tests/test_reports_query.py +++ b/backend/hct_mis_api/apps/reporting/tests/test_reports_query.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/sanction_list/celery_tasks.py b/backend/hct_mis_api/apps/sanction_list/celery_tasks.py index 0210e9b2c3..5c83c3fa63 100644 --- a/backend/hct_mis_api/apps/sanction_list/celery_tasks.py +++ b/backend/hct_mis_api/apps/sanction_list/celery_tasks.py @@ -10,7 +10,9 @@ def sync_sanction_list_task(): logger.info("sync_sanction_list_task start") try: - from hct_mis_api.apps.sanction_list.tasks.load_xml import LoadSanctionListXMLTask + from hct_mis_api.apps.sanction_list.tasks.load_xml import ( + LoadSanctionListXMLTask, + ) LoadSanctionListXMLTask().execute() except Exception as e: diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py index 99cadeffce..d1aae38d2f 100644 --- a/backend/hct_mis_api/apps/sanction_list/schema.py +++ b/backend/hct_mis_api/apps/sanction_list/schema.py @@ -1,5 +1,6 @@ -import graphene from django.db.models.functions import Lower + +import graphene from django_filters import FilterSet from graphene import relay from graphene_django import DjangoObjectType @@ -9,11 +10,11 @@ from hct_mis_api.apps.core.utils import CustomOrderingFilter from hct_mis_api.apps.sanction_list.models import ( SanctionListIndividual, - SanctionListIndividualDocument, - SanctionListIndividualNationalities, - SanctionListIndividualCountries, SanctionListIndividualAliasName, + SanctionListIndividualCountries, SanctionListIndividualDateOfBirth, + 
SanctionListIndividualDocument, + SanctionListIndividualNationalities, ) diff --git a/backend/hct_mis_api/apps/sanction_list/tasks/check_against_sanction_list.py b/backend/hct_mis_api/apps/sanction_list/tasks/check_against_sanction_list.py index 026f194993..130fed516d 100644 --- a/backend/hct_mis_api/apps/sanction_list/tasks/check_against_sanction_list.py +++ b/backend/hct_mis_api/apps/sanction_list/tasks/check_against_sanction_list.py @@ -1,16 +1,20 @@ -from datetime import datetime, date +from datetime import date, datetime from itertools import permutations -import dateutil.parser from django.conf import settings from django.core.mail import EmailMultiAlternatives from django.db.models import Q from django.template.loader import render_to_string -from openpyxl import load_workbook, Workbook + +import dateutil.parser +from openpyxl import Workbook, load_workbook from openpyxl.utils import get_column_letter from openpyxl.writer.excel import save_virtual_workbook -from hct_mis_api.apps.sanction_list.models import UploadedXLSXFile, SanctionListIndividual +from hct_mis_api.apps.sanction_list.models import ( + SanctionListIndividual, + UploadedXLSXFile, +) class CheckAgainstSanctionListTask: diff --git a/backend/hct_mis_api/apps/sanction_list/tests/test_check_against_sanction_list_pre_merge.py b/backend/hct_mis_api/apps/sanction_list/tests/test_check_against_sanction_list_pre_merge.py index 410557280e..8f3d153885 100644 --- a/backend/hct_mis_api/apps/sanction_list/tests/test_check_against_sanction_list_pre_merge.py +++ b/backend/hct_mis_api/apps/sanction_list/tests/test_check_against_sanction_list_pre_merge.py @@ -25,7 +25,7 @@ @override_config(SANCTION_LIST_MATCH_SCORE=3.5) class TestSanctionListPreMerge(BaseElasticSearchTestCase): - multi_db = True + databases = "__all__" TEST_FILES_PATH = f"{settings.PROJECT_ROOT}/apps/sanction_list/tests/test_files" diff --git a/backend/hct_mis_api/apps/sanction_list/views.py b/backend/hct_mis_api/apps/sanction_list/views.py index d3b65df60b..d61e4ea726 100644 --- a/backend/hct_mis_api/apps/sanction_list/views.py +++ b/backend/hct_mis_api/apps/sanction_list/views.py @@ -1,5 +1,6 @@ from django.contrib.auth.decorators import login_required from django.http import HttpResponse + from openpyxl.writer.excel import save_virtual_workbook from hct_mis_api.apps.sanction_list.template_generator import TemplateFileGenerator diff --git a/backend/hct_mis_api/apps/steficon/tests/test_rules.py b/backend/hct_mis_api/apps/steficon/tests/test_rules.py index d43d48128a..86536762fc 100644 --- a/backend/hct_mis_api/apps/steficon/tests/test_rules.py +++ b/backend/hct_mis_api/apps/steficon/tests/test_rules.py @@ -105,8 +105,8 @@ def test_release(self): release2 = rule.release() release1.refresh_from_db() self.assertEqual(release2.version, 2) - self.assertNotEquals(release1, release2) - self.assertNotEquals(release1, release2) + self.assertNotEqual(release1, release2) + self.assertNotEqual(release1, release2) def test_nested_rule(self): rule1 = Rule.objects.create(name="Rule1", definition="result.value=101", enabled=True) diff --git a/backend/hct_mis_api/apps/targeting/fixtures.py b/backend/hct_mis_api/apps/targeting/fixtures.py index 4c4de2d0ba..fcc9e2a429 100644 --- a/backend/hct_mis_api/apps/targeting/fixtures.py +++ b/backend/hct_mis_api/apps/targeting/fixtures.py @@ -9,10 +9,10 @@ from hct_mis_api.apps.household.fixtures import HouseholdFactory from hct_mis_api.apps.household.models import RESIDENCE_STATUS_CHOICE from hct_mis_api.apps.targeting.models import ( - 
TargetPopulation, - TargetingCriteriaRuleFilter, - TargetingCriteriaRule, TargetingCriteria, + TargetingCriteriaRule, + TargetingCriteriaRuleFilter, + TargetPopulation, ) diff --git a/backend/hct_mis_api/apps/targeting/services/xlsx_export_targeting_service.py b/backend/hct_mis_api/apps/targeting/services/xlsx_export_targeting_service.py index 9cec95ddc7..da16b2af8f 100644 --- a/backend/hct_mis_api/apps/targeting/services/xlsx_export_targeting_service.py +++ b/backend/hct_mis_api/apps/targeting/services/xlsx_export_targeting_service.py @@ -1,11 +1,12 @@ from functools import cached_property -import openpyxl from django.db.models import Q + +import openpyxl from openpyxl.utils import get_column_letter from hct_mis_api.apps.core.utils import nested_getattr -from hct_mis_api.apps.household.models import Individual, Document +from hct_mis_api.apps.household.models import Document, Individual from hct_mis_api.apps.targeting.models import TargetPopulation diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_candidate_households_by_targeting_criteria_query.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_candidate_households_by_targeting_criteria_query.py index 0353622456..d140a23ed2 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_candidate_households_by_targeting_criteria_query.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_candidate_households_by_targeting_criteria_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['CandidateListTargetingCriteriaQueryTestCase::test_candidate_households_list_by_targeting_criteria_approved_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_copy_target_population_mutation.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_copy_target_population_mutation.py index 663187d2ad..f98ddfe57b 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_copy_target_population_mutation.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_copy_target_population_mutation.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestCopyTargetPopulationMutation::test_copy_empty_target_1_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_create_target_population_mutation.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_create_target_population_mutation.py index 1c565b6b0e..b2100601e1 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_create_target_population_mutation.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_create_target_population_mutation.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestCreateTargetPopulationMutation::test_create_mutation_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_golden_record_by_targeting_criteria_query.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_golden_record_by_targeting_criteria_query.py index 19db18b146..56cbee8354 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_golden_record_by_targeting_criteria_query.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_golden_record_by_targeting_criteria_query.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() 
snapshots['GoldenRecordTargetingCriteriaQueryTestCase::test_golden_record_by_targeting_criteria_flex_field_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_status_change_target_population_mutation.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_status_change_target_population_mutation.py index fec66fa334..a6975171ba 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_status_change_target_population_mutation.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_status_change_target_population_mutation.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestUnapproveTargetPopulationMutation::test_unapprove_fail_target_population 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_update_target_population_mutation.py b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_update_target_population_mutation.py index 2ca1d4aec2..ea06fd3b59 100644 --- a/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_update_target_population_mutation.py +++ b/backend/hct_mis_api/apps/targeting/tests/snapshots/snap_test_update_target_population_mutation.py @@ -4,7 +4,6 @@ from snapshottest import Snapshot - snapshots = Snapshot() snapshots['TestUpdateTargetPopulationMutation::test_fail_update_0_wrong_args_count 1'] = { diff --git a/backend/hct_mis_api/apps/targeting/tests/test_create_target_population_mutation.py b/backend/hct_mis_api/apps/targeting/tests/test_create_target_population_mutation.py index 2d5d12f9b7..432dfedecc 100644 --- a/backend/hct_mis_api/apps/targeting/tests/test_create_target_population_mutation.py +++ b/backend/hct_mis_api/apps/targeting/tests/test_create_target_population_mutation.py @@ -1,4 +1,5 @@ from django.core.management import call_command + from parameterized import parameterized from hct_mis_api.apps.account.fixtures import UserFactory diff --git a/backend/hct_mis_api/apps/targeting/tests/test_golden_record_by_targeting_criteria_query.py b/backend/hct_mis_api/apps/targeting/tests/test_golden_record_by_targeting_criteria_query.py index 9a56af620c..0c203b1499 100644 --- a/backend/hct_mis_api/apps/targeting/tests/test_golden_record_by_targeting_criteria_query.py +++ b/backend/hct_mis_api/apps/targeting/tests/test_golden_record_by_targeting_criteria_query.py @@ -1,12 +1,13 @@ from django.core.management import call_command + from parameterized import parameterized +from hct_mis_api.apps.account.fixtures import UserFactory from hct_mis_api.apps.account.permissions import Permissions from hct_mis_api.apps.core.base_test_case import APITestCase from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.household.fixtures import create_household from hct_mis_api.apps.program.fixtures import ProgramFactory -from hct_mis_api.apps.account.fixtures import UserFactory class GoldenRecordTargetingCriteriaQueryTestCase(APITestCase): diff --git a/backend/hct_mis_api/apps/targeting/tests/test_individual_block_filters.py b/backend/hct_mis_api/apps/targeting/tests/test_individual_block_filters.py index 25cbe7b8e0..555c35ea1a 100644 --- a/backend/hct_mis_api/apps/targeting/tests/test_individual_block_filters.py +++ b/backend/hct_mis_api/apps/targeting/tests/test_individual_block_filters.py @@ -3,14 +3,14 @@ from hct_mis_api.apps.core.models import BusinessArea from hct_mis_api.apps.household.fixtures import create_household_and_individuals -from hct_mis_api.apps.household.models import Household, 
MALE, FEMALE +from hct_mis_api.apps.household.models import FEMALE, MALE, Household from hct_mis_api.apps.targeting.models import ( TargetingCriteria, + TargetingCriteriaQueryingMixin, TargetingCriteriaRule, + TargetingCriteriaRuleQueryingMixin, TargetingIndividualBlockRuleFilter, TargetingIndividualRuleFilterBlock, - TargetingCriteriaQueryingMixin, - TargetingCriteriaRuleQueryingMixin, TargetingIndividualRuleFilterBlockMixin, ) diff --git a/backend/hct_mis_api/apps/targeting/views.py b/backend/hct_mis_api/apps/targeting/views.py index 3f18261afd..b4448d9421 100644 --- a/backend/hct_mis_api/apps/targeting/views.py +++ b/backend/hct_mis_api/apps/targeting/views.py @@ -3,10 +3,13 @@ from django.contrib.admin.views.decorators import staff_member_required from django.http import HttpResponse from django.shortcuts import get_object_or_404 + from openpyxl.writer.excel import save_virtual_workbook from hct_mis_api.apps.targeting.models import TargetPopulation -from hct_mis_api.apps.targeting.services.xlsx_export_targeting_service import XlsxExportTargetingService +from hct_mis_api.apps.targeting.services.xlsx_export_targeting_service import ( + XlsxExportTargetingService, +) logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/utils/mutations.py b/backend/hct_mis_api/apps/utils/mutations.py index 91824d64ce..ffe1d90b04 100644 --- a/backend/hct_mis_api/apps/utils/mutations.py +++ b/backend/hct_mis_api/apps/utils/mutations.py @@ -1,8 +1,9 @@ import logging -import graphene from django.core.exceptions import ValidationError +import graphene + from hct_mis_api.apps.utils.schema import Arg logger = logging.getLogger(__name__) diff --git a/backend/hct_mis_api/apps/utils/schema.py b/backend/hct_mis_api/apps/utils/schema.py index 547deec07a..c115bd5b12 100644 --- a/backend/hct_mis_api/apps/utils/schema.py +++ b/backend/hct_mis_api/apps/utils/schema.py @@ -1,7 +1,9 @@ -import graphene import logging + from django.core.files.storage import default_storage +import graphene + from hct_mis_api.apps.core.core_fields_attributes import TYPE_IMAGE from hct_mis_api.apps.core.models import FlexibleAttribute diff --git a/backend/hct_mis_api/apps/utils/validators.py b/backend/hct_mis_api/apps/utils/validators.py index 596f2b4612..c4dd9a7c66 100644 --- a/backend/hct_mis_api/apps/utils/validators.py +++ b/backend/hct_mis_api/apps/utils/validators.py @@ -1,7 +1,6 @@ from django.core.validators import RegexValidator from django.utils.translation import gettext_lazy as _ - DoubleSpaceValidator = RegexValidator( r"\s{2,}", _("Double spaces characters are not allowed."), diff --git a/backend/hct_mis_api/middlewares/version.py b/backend/hct_mis_api/middlewares/version.py index 83168141e7..a216d8406f 100644 --- a/backend/hct_mis_api/middlewares/version.py +++ b/backend/hct_mis_api/middlewares/version.py @@ -1,6 +1,7 @@ import sys from django.conf import settings + from sentry_sdk import configure_scope diff --git a/backend/hct_mis_api/schema.py b/backend/hct_mis_api/schema.py index d9f7f8812a..d3d850621e 100644 --- a/backend/hct_mis_api/schema.py +++ b/backend/hct_mis_api/schema.py @@ -1,31 +1,29 @@ import graphene - -# DO NOT DELETE THIS IMPORT -import hct_mis_api.apps.core.converters - from graphene_django.debug import DjangoDebug import hct_mis_api.apps.account.schema -import hct_mis_api.apps.core.schema +import hct_mis_api.apps.activity_log.schema -import hct_mis_api.apps.grievance.schema +# DO NOT DELETE THIS IMPORT +import hct_mis_api.apps.core.converters +import hct_mis_api.apps.core.schema import 
hct_mis_api.apps.grievance.mutations +import hct_mis_api.apps.grievance.schema import hct_mis_api.apps.household.schema import hct_mis_api.apps.payment.mutations import hct_mis_api.apps.payment.schema import hct_mis_api.apps.program.mutations import hct_mis_api.apps.program.schema import hct_mis_api.apps.registration_data.schema -import hct_mis_api.apps.registration_datahub.schema import hct_mis_api.apps.registration_datahub.mutations -import hct_mis_api.apps.reporting.schema +import hct_mis_api.apps.registration_datahub.schema import hct_mis_api.apps.reporting.mutations +import hct_mis_api.apps.reporting.schema import hct_mis_api.apps.sanction_list.mutations +import hct_mis_api.apps.sanction_list.schema +import hct_mis_api.apps.steficon.schema import hct_mis_api.apps.targeting.mutations import hct_mis_api.apps.targeting.schema -import hct_mis_api.apps.steficon.schema -import hct_mis_api.apps.sanction_list.schema -import hct_mis_api.apps.activity_log.schema class Query( diff --git a/backend/hct_mis_api/settings/base.py b/backend/hct_mis_api/settings/base.py index b7e170a22b..2f0de9d2e3 100644 --- a/backend/hct_mis_api/settings/base.py +++ b/backend/hct_mis_api/settings/base.py @@ -20,7 +20,7 @@ PROJECT_NAME = "hct_mis_api" # project root and add "apps" to the path -PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +PROJECT_ROOT = os.path.dirname(os.path.dirname(__file__)) from .config import env # domains/hosts etc. From b702e9cf73acfb7680afd7105a580f2ac5429e79 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 4 Feb 2022 21:59:15 +0000 Subject: [PATCH 06/24] Updated schema for compliance with removing non-model-fields from Meta.fields of filters --- .flake8 | 6 + backend/hct_mis_api/apps/account/schema.py | 34 +-- backend/hct_mis_api/apps/core/schema.py | 7 +- backend/hct_mis_api/apps/grievance/schema.py | 218 +++++------------- backend/hct_mis_api/apps/household/schema.py | 51 ++-- backend/hct_mis_api/apps/program/schema.py | 24 +- .../apps/registration_data/schema.py | 9 +- .../hct_mis_api/apps/sanction_list/schema.py | 8 +- backend/poetry.lock | 6 +- 9 files changed, 131 insertions(+), 232 deletions(-) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000..3e9d84a6fc --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 88 +max-complexity = 18 +select = B,C,E,F,W,T4,B9, +ignore = E203, E266, E501, W503, F403, F401, E231, E123, B950, B306, C416, C901, F405, E501, W291 +extend-ignore = E203 diff --git a/backend/hct_mis_api/apps/account/schema.py b/backend/hct_mis_api/apps/account/schema.py index eb4736af98..ac44ac8ba8 100644 --- a/backend/hct_mis_api/apps/account/schema.py +++ b/backend/hct_mis_api/apps/account/schema.py @@ -47,23 +47,19 @@ def permissions_resolver(user_roles): class UsersFilter(FilterSet): business_area = CharFilter(required=True, method="business_area_filter") - search = CharFilter(method="search_filter") + search = CharFilter(method="search_filter", lookup_expr=["exact", "startswith"]) status = MultipleChoiceFilter(field_name="status", choices=USER_STATUS_CHOICES) - partner = MultipleChoiceFilter( - choices=Partner.get_partners_as_choices(), method="partners_filter" - ) - roles = MultipleChoiceFilter( - choices=Role.get_roles_as_choices(), method="roles_filter" - ) + partner = MultipleChoiceFilter(choices=Partner.get_partners_as_choices(), method="partners_filter") + roles = MultipleChoiceFilter(choices=Role.get_roles_as_choices(), 
method="roles_filter") class Meta: model = get_user_model() - fields = { - "search": ["exact", "startswith"], - "status": ["exact"], - "partner": ["exact"], - "roles": ["exact"], - } + fields = [ + "search", + "status", + "partner", + "roles", + ] order_by = CustomOrderingFilter( fields=( @@ -121,9 +117,7 @@ class UserBusinessAreaNode(DjangoObjectType): permissions = graphene.List(graphene.String) def resolve_permissions(self, info): - user_roles = UserRole.objects.filter( - user=info.context.user, business_area_id=self.id - ) + user_roles = UserRole.objects.filter(user=info.context.user, business_area_id=self.id) return permissions_resolver(user_roles) class Meta: @@ -198,18 +192,14 @@ class Query(graphene.ObjectType): UserNode, filterset_class=UsersFilter, permission_classes=( - hopeOneOfPermissionClass( - Permissions.USER_MANAGEMENT_VIEW_LIST, *ALL_GRIEVANCES_CREATE_MODIFY - ), + hopeOneOfPermissionClass(Permissions.USER_MANAGEMENT_VIEW_LIST, *ALL_GRIEVANCES_CREATE_MODIFY), ), ) # all_log_entries = graphene.ConnectionField(LogEntryObjectConnection, object_id=graphene.String(required=False)) user_roles_choices = graphene.List(ChoiceObject) user_status_choices = graphene.List(ChoiceObject) user_partner_choices = graphene.List(ChoiceObject) - has_available_users_to_export = graphene.Boolean( - business_area_slug=graphene.String(required=True) - ) + has_available_users_to_export = graphene.Boolean(business_area_slug=graphene.String(required=True)) # def resolve_all_log_entries(self, info, **kwargs): # object_id = kwargs.get('object_id') diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 3e3fa46d30..430a8f66bc 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -38,16 +38,17 @@ class AdminAreaFilter(FilterSet): business_area = CharFilter( field_name="admin_area_level__country__business_areas__slug", ) + title = CharFilter(lookup_name="title", lookup_expr=["exact", "istartswith"]) level = IntegerFilter( field_name="level", ) class Meta: model = AdminArea - fields = { - "title": ["exact", "istartswith"], + fields = [ + "title", # "business_area": ["exact"], - } + ] class ChoiceObject(graphene.ObjectType): diff --git a/backend/hct_mis_api/apps/grievance/schema.py b/backend/hct_mis_api/apps/grievance/schema.py index 02f3c82e14..d4e9879b31 100644 --- a/backend/hct_mis_api/apps/grievance/schema.py +++ b/backend/hct_mis_api/apps/grievance/schema.py @@ -14,6 +14,7 @@ ModelChoiceFilter, ModelMultipleChoiceFilter, MultipleChoiceFilter, + NumberFilter, OrderingFilter, TypedMultipleChoiceFilter, UUIDFilter, @@ -134,12 +135,11 @@ class GrievanceTicketFilter(FilterSet): "payment_verifications__payment_record__service_provider", ), ) - + id = NumberFilter(field_name="id", lookup_expr=["exact", "startswith"]) + area = CharFilter(field_name="area", lookup_expr=["exact", "startswith"]) business_area = CharFilter(field_name="business_area__slug", required=True) search = CharFilter(method="search_filter") - status = TypedMultipleChoiceFilter( - field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int - ) + status = TypedMultipleChoiceFilter(field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int) fsp = CharFilter(method="fsp_filter") admin = ModelMultipleChoiceFilter( field_name="admin", @@ -151,18 +151,16 @@ class GrievanceTicketFilter(FilterSet): lookup_expr="payment_verifications__cash_plan_payment_verification", ) created_at_range = DateTimeRangeFilter(field_name="created_at") - 
permissions = MultipleChoiceFilter( - choices=Permissions.choices(), method="permissions_filter" - ) + permissions = MultipleChoiceFilter(choices=Permissions.choices(), method="permissions_filter") class Meta: - fields = { - "id": ["exact", "startswith"], - "category": ["exact"], - "area": ["exact", "startswith"], - "assigned_to": ["exact"], - "registration_data_import": ["exact"], - } + fields = [ + "id", + "category", + "area", + "assigned_to", + "registration_data_import", + ] model = GrievanceTicket order_by = OrderingFilter( @@ -202,9 +200,7 @@ def search_filter(self, qs, name, value): for ticket_type, ticket_fields in self.SEARCH_TICKET_TYPES_LOOKUPS.items(): for field, lookups in ticket_fields.items(): for lookup in lookups: - q_obj |= Q( - **{f"{ticket_type}__{field}__{lookup}__startswith": value} - ) + q_obj |= Q(**{f"{ticket_type}__{field}__{lookup}__startswith": value}) return qs.filter(q_obj) @@ -212,9 +208,7 @@ def fsp_filter(self, qs, name, value): if value: q_obj = Q() for ticket_type, path_to_fsp in self.TICKET_TYPES_WITH_FSP: - q_obj |= Q( - **{f"{ticket_type}__{path_to_fsp}__full_name__istartswith": value} - ) + q_obj |= Q(**{f"{ticket_type}__{path_to_fsp}__full_name__istartswith": value}) return qs.filter(q_obj) return qs @@ -225,25 +219,12 @@ def admin_filter(self, qs, name, value): return qs def permissions_filter(self, qs, name, value): - can_view_ex_sensitive_all = ( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE.value in value - ) - can_view_sensitive_all = ( - Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE.value in value - ) - can_view_ex_sensitive_creator = ( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR.value - in value - ) - can_view_ex_sensitive_owner = ( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER.value in value - ) - can_view_sensitive_creator = ( - Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR.value in value - ) - can_view_sensitive_owner = ( - Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER.value in value - ) + can_view_ex_sensitive_all = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE.value in value + can_view_sensitive_all = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE.value in value + can_view_ex_sensitive_creator = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR.value in value + can_view_ex_sensitive_owner = Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER.value in value + can_view_sensitive_creator = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR.value in value + can_view_sensitive_owner = Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER.value in value # can view all if can_view_ex_sensitive_all and can_view_sensitive_all: @@ -253,9 +234,7 @@ def permissions_filter(self, qs, name, value): filters_1_exclude = {} filters_2 = {} filters_2_exclude = {} - sensitive_category_filter = { - "category": GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE - } + sensitive_category_filter = {"category": GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE} created_by_filter = {"created_by": self.request.user} assigned_to_filter = {"assigned_to": self.request.user} @@ -267,13 +246,9 @@ def permissions_filter(self, qs, name, value): filters_2.update(assigned_to_filter) if can_view_ex_sensitive_all: - return qs.filter( - ~Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_2) - ) + return qs.filter(~Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_2)) else: - return qs.filter( - Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_1) - ) + return 
qs.filter(Q(**sensitive_category_filter) | Q(**filters_1) | Q(**filters_1)) else: # no full lists so only creator and/or owner lists @@ -287,8 +262,7 @@ def permissions_filter(self, qs, name, value): filters_2_exclude.update(sensitive_category_filter) if filters_1 or filters_2: return qs.filter( - Q(Q(**filters_1), ~Q(**filters_1_exclude)) - | Q(Q(**filters_2), ~Q(**filters_2_exclude)) + Q(Q(**filters_1), ~Q(**filters_1_exclude)) | Q(Q(**filters_2), ~Q(**filters_2_exclude)) ) else: return GrievanceTicket.objects.none() @@ -296,18 +270,12 @@ def permissions_filter(self, qs, name, value): class ExistingGrievanceTicketFilter(FilterSet): business_area = CharFilter(field_name="business_area__slug", required=True) - category = ChoiceFilter( - field_name="category", choices=GrievanceTicket.CATEGORY_CHOICES - ) - issue_type = ChoiceFilter( - field_name="issue_type", choices=GrievanceTicket.ALL_ISSUE_TYPES - ) + category = ChoiceFilter(field_name="category", choices=GrievanceTicket.CATEGORY_CHOICES) + issue_type = ChoiceFilter(field_name="issue_type", choices=GrievanceTicket.ALL_ISSUE_TYPES) household = ModelChoiceFilter(queryset=Household.objects.all()) individual = ModelChoiceFilter(queryset=Individual.objects.all()) payment_record = ModelMultipleChoiceFilter(queryset=PaymentRecord.objects.all()) - permissions = MultipleChoiceFilter( - choices=Permissions.choices(), method="permissions_filter" - ) + permissions = MultipleChoiceFilter(choices=Permissions.choices(), method="permissions_filter") class Meta: fields = ("id",) @@ -380,12 +348,8 @@ class Meta: class GrievanceTicketNode(BaseNodePermissionMixin, DjangoObjectType): permission_classes = ( hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR - ), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER - ), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_OWNER), @@ -421,31 +385,19 @@ def check_node_permission(cls, info, object_instance): if object_instance.category == GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE: perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE.value - creator_perm = ( - Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR.value - ) + creator_perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_CREATOR.value owner_perm = Permissions.GRIEVANCES_VIEW_DETAILS_SENSITIVE_AS_OWNER.value else: perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE.value - creator_perm = ( - Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR.value - ) - owner_perm = ( - Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER.value - ) + creator_perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_CREATOR.value + owner_perm = Permissions.GRIEVANCES_VIEW_DETAILS_EXCLUDING_SENSITIVE_AS_OWNER.value - check_creator = object_instance.created_by == user and user.has_permission( - creator_perm, business_area - ) - check_assignee = object_instance.assigned_to == user and user.has_permission( - owner_perm, business_area - ) + check_creator = object_instance.created_by == user and user.has_permission(creator_perm, 
business_area) + check_assignee = object_instance.assigned_to == user and user.has_permission(owner_perm, business_area) if user.has_permission(perm, business_area) or check_creator or check_assignee: return True - msg = ( - "User is not active creator/assignee and does not have '{perm}' permission" - ) + msg = "User is not active creator/assignee and does not have '{perm}' permission" logger.error(msg) raise GraphQLError(msg) @@ -465,9 +417,7 @@ def resolve_individual(grievance_ticket, info): return GrievanceTicketNode._search_for_lookup(grievance_ticket, "individual") def resolve_payment_record(grievance_ticket, info): - return GrievanceTicketNode._search_for_lookup( - grievance_ticket, "payment_record" - ) + return GrievanceTicketNode._search_for_lookup(grievance_ticket, "payment_record") def resolve_admin(grievance_ticket, info): if grievance_ticket.admin2: @@ -512,9 +462,7 @@ def resolve_individual_data(self, info): individual_data = self.individual_data flex_fields = individual_data.get("flex_fields") if flex_fields: - images_flex_fields_names = FlexibleAttribute.objects.filter( - type=TYPE_IMAGE - ).values_list("name", flat=True) + images_flex_fields_names = FlexibleAttribute.objects.filter(type=TYPE_IMAGE).values_list("name", flat=True) for name, value in flex_fields.items(): if value and name in images_flex_fields_names: try: @@ -537,17 +485,13 @@ def resolve_individual_data(self, info): previous_value = document.get("previous_value", {}) if previous_value and previous_value.get("photo"): previous_value["photoraw"] = previous_value["photo"] - previous_value["photo"] = default_storage.url( - previous_value.get("photo") - ) + previous_value["photo"] = default_storage.url(previous_value.get("photo")) documents_to_edit[index]["previous_value"] = previous_value current_value = document.get("value", {}) if current_value and current_value.get("photo"): current_value["photoraw"] = current_value["photo"] - current_value["photo"] = default_storage.url( - current_value.get("photo") - ) + current_value["photo"] = default_storage.url(current_value.get("photo")) documents_to_edit[index]["value"] = current_value individual_data["documents_to_edit"] = documents_to_edit @@ -557,9 +501,7 @@ def resolve_individual_data(self, info): current_value = document.get("value", {}) if current_value and current_value.get("photo"): current_value["photoraw"] = current_value["photo"] - current_value["photo"] = default_storage.url( - current_value.get("photo") - ) + current_value["photo"] = default_storage.url(current_value.get("photo")) documents[index]["value"] = current_value individual_data["documents"] = documents @@ -579,9 +521,7 @@ def resolve_individual_data(self, info): individual_data = self.individual_data flex_fields = individual_data.get("flex_fields") if flex_fields: - images_flex_fields_names = FlexibleAttribute.objects.filter( - type=TYPE_IMAGE - ).values_list("name", flat=True) + images_flex_fields_names = FlexibleAttribute.objects.filter(type=TYPE_IMAGE).values_list("name", flat=True) for name, value in flex_fields.items(): if value and name in images_flex_fields_names: try: @@ -648,9 +588,7 @@ class Meta: def resolve_extra_data(parent, info): golden_records = parent.extra_data.get("golden_records") possible_duplicate = parent.extra_data.get("possible_duplicate") - return TicketNeedsAdjudicationDetailsExtraDataNode( - golden_records, possible_duplicate - ) + return TicketNeedsAdjudicationDetailsExtraDataNode(golden_records, possible_duplicate) class 
TicketSystemFlaggingDetailsNode(DjangoObjectType): @@ -699,10 +637,7 @@ class IssueTypesObject(graphene.ObjectType): sub_categories = graphene.List(ChoiceObject) def resolve_sub_categories(self, info): - return [ - {"name": value, "value": key} - for key, value in self.get("sub_categories").items() - ] + return [{"name": value, "value": key} for key, value in self.get("sub_categories").items()] class AddIndividualFiledObjectType(graphene.ObjectType): @@ -726,12 +661,8 @@ class Query(graphene.ObjectType): filterset_class=GrievanceTicketFilter, permission_classes=( hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR - ), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER - ), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER), @@ -742,12 +673,8 @@ class Query(graphene.ObjectType): filterset_class=ExistingGrievanceTicketFilter, permission_classes=( hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR - ), - hopePermissionClass( - Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER - ), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_CREATOR), + hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_EXCLUDING_SENSITIVE_AS_OWNER), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_CREATOR), hopePermissionClass(Permissions.GRIEVANCES_VIEW_LIST_SENSITIVE_AS_OWNER), @@ -763,12 +690,8 @@ class Query(graphene.ObjectType): year=graphene.Int(required=True), administrative_area=graphene.String(required=False), ) - all_add_individuals_fields_attributes = graphene.List( - FieldAttributeNode, description="All field datatype meta." - ) - all_edit_household_fields_attributes = graphene.List( - FieldAttributeNode, description="All field datatype meta." 
- ) + all_add_individuals_fields_attributes = graphene.List(FieldAttributeNode, description="All field datatype meta.") + all_edit_household_fields_attributes = graphene.List(FieldAttributeNode, description="All field datatype meta.") grievance_ticket_status_choices = graphene.List(ChoiceObject) grievance_ticket_category_choices = graphene.List(ChoiceObject) grievance_ticket_manual_category_choices = graphene.List(ChoiceObject) @@ -788,10 +711,7 @@ def resolve_grievance_ticket_manual_category_choices(self, info, **kwargs): ] def resolve_grievance_ticket_all_category_choices(self, info, **kwargs): - return [ - {"name": name, "value": value} - for value, name in GrievanceTicket.CATEGORY_CHOICES - ] + return [{"name": name, "value": value} for value, name in GrievanceTicket.CATEGORY_CHOICES] def resolve_grievance_ticket_issue_type_choices(self, info, **kwargs): categories = choices_to_dict(GrievanceTicket.CATEGORY_CHOICES) @@ -833,15 +753,10 @@ def resolve_all_add_individuals_fields_attributes(self, info, **kwargs): [ x for x in CORE_FIELDS_ATTRIBUTES - if x.get("associated_with") == _INDIVIDUAL - and x.get("name") in ACCEPTABLE_FIELDS + if x.get("associated_with") == _INDIVIDUAL and x.get("name") in ACCEPTABLE_FIELDS ] + list(KOBO_ONLY_INDIVIDUAL_FIELDS.values()) - + list( - FlexibleAttribute.objects.filter( - associated_with=FlexibleAttribute.ASSOCIATED_WITH_INDIVIDUAL - ) - ) + + list(FlexibleAttribute.objects.filter(associated_with=FlexibleAttribute.ASSOCIATED_WITH_INDIVIDUAL)) ) return sort_by_attr(all_options, "label.English(EN)") @@ -897,13 +812,8 @@ def resolve_all_edit_household_fields_attributes(self, info, **kwargs): all_options = [ x for x in HOUSEHOLD_EDIT_ONLY_FIELDS + CORE_FIELDS_ATTRIBUTES - if x.get("associated_with") == _HOUSEHOLD - and x.get("name") in ACCEPTABLE_FIELDS - ] + list( - FlexibleAttribute.objects.filter( - associated_with=FlexibleAttribute.ASSOCIATED_WITH_HOUSEHOLD - ) - ) + if x.get("associated_with") == _HOUSEHOLD and x.get("name") in ACCEPTABLE_FIELDS + ] + list(FlexibleAttribute.objects.filter(associated_with=FlexibleAttribute.ASSOCIATED_WITH_HOUSEHOLD)) return sort_by_attr(all_options, "label.English(EN)") @@ -921,9 +831,7 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): try: grievance_tickets = grievance_tickets.filter( - admin=AdminArea.objects.get( - id=filters.get("administrative_area") - ).title + admin=AdminArea.objects.get(id=filters.get("administrative_area")).title ) except AdminArea.DoesNotExist: pass @@ -942,12 +850,8 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): GrievanceTicket.CATEGORY_POSITIVE_FEEDBACK, GrievanceTicket.CATEGORY_NEGATIVE_FEEDBACK, ] - all_open_tickets = grievance_tickets.filter( - ~Q(status=GrievanceTicket.STATUS_CLOSED) - ) - all_closed_tickets = grievance_tickets.filter( - status=GrievanceTicket.STATUS_CLOSED - ) + all_open_tickets = grievance_tickets.filter(~Q(status=GrievanceTicket.STATUS_CLOSED)) + all_closed_tickets = grievance_tickets.filter(status=GrievanceTicket.STATUS_CLOSED) datasets = [ { @@ -969,12 +873,8 @@ def resolve_chart_grievances(self, info, business_area_slug, year, **kwargs): return { "labels": grievance_status_labels, "datasets": datasets, - "total_number_of_grievances": grievance_tickets.exclude( - category__in=feedback_categories - ).count(), - "total_number_of_feedback": grievance_tickets.filter( - category__in=feedback_categories - ).count(), + "total_number_of_grievances": 
grievance_tickets.exclude(category__in=feedback_categories).count(), + "total_number_of_feedback": grievance_tickets.filter(category__in=feedback_categories).count(), "total_number_of_open_sensitive": all_open_tickets.filter( category=GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE, ).count(), diff --git a/backend/hct_mis_api/apps/household/schema.py b/backend/hct_mis_api/apps/household/schema.py index cde142f8f7..20b69e9e4b 100644 --- a/backend/hct_mis_api/apps/household/schema.py +++ b/backend/hct_mis_api/apps/household/schema.py @@ -105,6 +105,10 @@ class HouseholdFilter(FilterSet): business_area = CharFilter(field_name="business_area__slug") + country_origin = CharFilter(field_name="country_origin", lookup_expr=["exact", "startswith"]) + head_of_household__full_name = CharFilter( + field_name="head_of_household__full_name", lookup_expr=["exact", "startswith"] + ) size = IntegerRangeFilter(field_name="size") search = CharFilter(method="search_filter") last_registration_date = DateRangeFilter(field_name="last_registration_date") @@ -113,18 +117,18 @@ class HouseholdFilter(FilterSet): class Meta: model = Household - fields = { - "business_area": ["exact"], - "country_origin": ["exact", "startswith"], - "address": ["exact", "startswith"], - "head_of_household__full_name": ["exact", "startswith"], - "size": ["range", "lte", "gte"], - "admin_area": ["exact"], - "target_populations": ["exact"], - "programs": ["exact"], - "residence_status": ["exact"], - "withdrawn": ["exact"], - } + fields = [ + "business_area", + "country_origin", + "address", + "head_of_household__full_name", + "size", + "admin_area", + "target_populations", + "programs", + "residence_status", + "withdrawn", + ] order_by = CustomOrderingFilter( fields=( @@ -167,7 +171,7 @@ class IndividualFilter(FilterSet): business_area = CharFilter( field_name="business_area__slug", ) - age = AgeRangeFilter(field_name="birth_date") + age = AgeRangeFilter(field_name="birth_date", lookup_expr=["range", "lte", "gte"]) sex = MultipleChoiceFilter(field_name="sex", choices=SEX_CHOICE) programs = ModelMultipleChoiceFilter(field_name="household__programs", queryset=Program.objects.all()) search = CharFilter(method="search_filter") @@ -176,20 +180,21 @@ class IndividualFilter(FilterSet): status = MultipleChoiceFilter(choices=INDIVIDUAL_STATUS_CHOICES, method="status_filter") excluded_id = CharFilter(method="filter_excluded_id") withdrawn = BooleanFilter(field_name="withdrawn") + full_name = CharFilter(field_name="full_name", lookup_expr=["exact", "startswith", "endswith"]) flags = MultipleChoiceFilter(choices=INDIVIDUAL_FLAGS_CHOICES, method="flags_filter") class Meta: model = Individual - fields = { - "household__id": ["exact"], - "programs": ["exact"], - "business_area": ["exact"], - "full_name": ["exact", "startswith", "endswith"], - "age": ["range", "lte", "gte"], - "sex": ["exact"], - "household__admin_area": ["exact"], - "withdrawn": ["exact"], - } + fields = [ + "household__id", + "programs", + "business_area", + "full_name", + "age", + "sex", + "household__admin_area", + "withdrawn", + ] order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/program/schema.py b/backend/hct_mis_api/apps/program/schema.py index fe31c35dda..344c775579 100644 --- a/backend/hct_mis_api/apps/program/schema.py +++ b/backend/hct_mis_api/apps/program/schema.py @@ -116,19 +116,23 @@ class CashPlanFilter(FilterSet): verification_status = MultipleChoiceFilter( field_name="verification_status", choices=CashPlanPaymentVerification.STATUS_CHOICES ) 
- business_area = CharFilter( - field_name="business_area__slug", + assistance_through = CharFilter(field_name="assistance_through", lookup_expr=["exact", "startswith"]) + service_provider__full_name = CharFilter( + field_name="service_provider__full_name", lookup_expr=["exact", "startswith"] ) + end_date = DateFilter(field_name="end_date", lookup_expr=["exact", "lte", "gte"]) + start_date = DateFilter(field_name="start_date", lookup_expr=["exact", "lte", "gte"]) + business_area = CharFilter(field_name="business_area__slug", lookup_expr=["exact", "startswith"]) class Meta: - fields = { - "program": ["exact"], - "assistance_through": ["exact", "startswith"], - "service_provider__full_name": ["exact", "startswith"], - "start_date": ["exact", "lte", "gte"], - "end_date": ["exact", "lte", "gte"], - "business_area": ["exact"], - } + fields = ( + "program", + "assistance_through", + "service_provider__full_name", + "start_date", + "end_date", + "business_area", + ) model = CashPlan order_by = OrderingFilter( diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py index 1d4289d39f..6ce0a8d63e 100644 --- a/backend/hct_mis_api/apps/registration_data/schema.py +++ b/backend/hct_mis_api/apps/registration_data/schema.py @@ -26,16 +26,11 @@ class RegistrationDataImportFilter(FilterSet): import_date = DateFilter(field_name="import_date__date") business_area = CharFilter(field_name="business_area__slug") + name = CharFilter(field_name="name", lookup_expr=["exact", "startswith"]) class Meta: model = RegistrationDataImport - fields = { - "imported_by__id": ["exact"], - "import_date": ["exact"], - "status": ["exact"], - "name": ["exact", "startswith"], - "business_area": ["exact"], - } + fields = ["imported_by__id", "import_date", "status", "name", "business_area"] order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py index d1aae38d2f..fec3d66143 100644 --- a/backend/hct_mis_api/apps/sanction_list/schema.py +++ b/backend/hct_mis_api/apps/sanction_list/schema.py @@ -19,12 +19,10 @@ class SanctionListIndividualFilter(FilterSet): + full_name = CharFilter(field_name="full_name", lookup_expr=["exact", "startswith"]) + class Meta: - fields = { - "id": ["exact"], - "full_name": ["exact", "startswith"], - "reference_number": ["exact"], - } + fields = ("id", "full_name", "reference_number") model = SanctionListIndividual order_by = CustomOrderingFilter( diff --git a/backend/poetry.lock b/backend/poetry.lock index 5b958e35ee..127231e9bd 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -68,7 +68,7 @@ test = ["astroid", "pytest"] [[package]] name = "azure-common" -version = "1.1.27" +version = "1.1.28" description = "Microsoft Azure Client Library for Python (Common)" category = "main" optional = false @@ -1986,8 +1986,8 @@ asttokens = [ {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, ] azure-common = [ - {file = "azure-common-1.1.27.zip", hash = "sha256:9f3f5d991023acbd93050cf53c4e863c6973ded7e236c69e99c8ff5c7bad41ef"}, - {file = "azure_common-1.1.27-py2.py3-none-any.whl", hash = "sha256:426673962740dbe9aab052a4b52df39c07767decd3f25fdc87c9d4c566a04934"}, + {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, + {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = 
"sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, ] azure-storage-blob = [ {file = "azure-storage-blob-2.1.0.tar.gz", hash = "sha256:b90323aad60f207f9f90a0c4cf94c10acc313c20b39403398dfba51f25f7b454"}, From b71cc191c94fc3b419d3101e0074e8e07327a97b Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 4 Feb 2022 23:30:26 +0000 Subject: [PATCH 07/24] Fixed bugs --- backend/hct_mis_api/apps/core/schema.py | 2 +- backend/hct_mis_api/apps/sanction_list/schema.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 430a8f66bc..55c2199333 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -38,7 +38,7 @@ class AdminAreaFilter(FilterSet): business_area = CharFilter( field_name="admin_area_level__country__business_areas__slug", ) - title = CharFilter(lookup_name="title", lookup_expr=["exact", "istartswith"]) + title = CharFilter(field_name="title", lookup_expr=["exact", "istartswith"]) level = IntegerFilter( field_name="level", ) diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py index fec3d66143..61e7d3e306 100644 --- a/backend/hct_mis_api/apps/sanction_list/schema.py +++ b/backend/hct_mis_api/apps/sanction_list/schema.py @@ -1,7 +1,7 @@ from django.db.models.functions import Lower import graphene -from django_filters import FilterSet +from django_filters import CharFilter, FilterSet from graphene import relay from graphene_django import DjangoObjectType From 6369561f38b12efc22b9a1bfb3e1099aa6aeb3e1 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Sat, 5 Feb 2022 00:04:45 +0000 Subject: [PATCH 08/24] Downgraded Python version --- backend/poetry.lock | 4 ++-- backend/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 127231e9bd..c405204880 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1953,8 +1953,8 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" -python-versions = "3.9.10" -content-hash = "2d27a1683a39d74f9d3ea7a654699b5468c12ac6c445577369b5c5e9a878ffd5" +python-versions = "3.9.1" +content-hash = "f2961476c0719f7719a405de9b78ac15f9a20756b8524d976e3f38c4143ce76a" [metadata.files] amqp = [ diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 5502ab29bb..61e1175eaf 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -51,7 +51,7 @@ description = "HCT MIS is UNICEF's humanitarian cash transfer platform." 
authors = ["Tivix"] [tool.poetry.dependencies] -python = "3.9.10" +python = "3.9.1" Django = "3.2" Jinja2 = "2.11.2" #name = "HCT MIS Backend" From f05cd1401ffc587268c5603bb175c545a2db6b19 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Mon, 7 Feb 2022 16:48:45 +0000 Subject: [PATCH 09/24] Updated shcema to remove non model fields --- .../hct_mis_api/apps/account/permissions.py | 49 ++--- backend/hct_mis_api/apps/account/schema.py | 2 +- .../admin/account/user/change_form.html | 3 +- backend/hct_mis_api/apps/core/schema.py | 7 +- backend/hct_mis_api/apps/grievance/schema.py | 6 +- backend/hct_mis_api/apps/household/admin.py | 5 +- backend/hct_mis_api/apps/household/schema.py | 33 +++- backend/hct_mis_api/apps/program/schema.py | 46 ++++- .../program/tests/test_cash_plan_queries.py | 1 - .../apps/registration_data/schema.py | 4 +- .../apps/registration_datahub/admin.py | 18 +- .../generate_dashboard_report_service.py | 178 +++++------------- .../hct_mis_api/apps/sanction_list/schema.py | 4 +- 13 files changed, 147 insertions(+), 209 deletions(-) diff --git a/backend/hct_mis_api/apps/account/permissions.py b/backend/hct_mis_api/apps/account/permissions.py index e2752318f1..e49458583b 100644 --- a/backend/hct_mis_api/apps/account/permissions.py +++ b/backend/hct_mis_api/apps/account/permissions.py @@ -197,14 +197,11 @@ def has_permission(cls, info, **kwargs): else: if business_area_arg is None: return False - business_area = BusinessArea.objects.filter( - slug=business_area_arg - ).first() + business_area = BusinessArea.objects.filter(slug=business_area_arg).first() if business_area is None: return False - return ( - info.context.user.is_authenticated - and info.context.user.has_permission(permission.name, business_area) + return info.context.user.is_authenticated and info.context.user.has_permission( + permission.name, business_area ) return XDPerm @@ -221,9 +218,7 @@ def has_permission(cls, info, **kwargs): else: if business_area_arg is None: return False - business_area = BusinessArea.objects.filter( - slug=business_area_arg - ).first() + business_area = BusinessArea.objects.filter(slug=business_area_arg).first() if business_area is None: return False for permission in permissions: @@ -240,10 +235,7 @@ class BaseNodePermissionMixin: @classmethod def check_node_permission(cls, info, object_instance): business_area = object_instance.business_area - if not any( - perm.has_permission(info, business_area=business_area) - for perm in cls.permission_classes - ): + if not any(perm.has_permission(info, business_area=business_area) for perm in cls.permission_classes): logger.error("Permission Denied") raise GraphQLError("Permission Denied") @@ -314,15 +306,16 @@ def filterset_class(self): if self._extra_filter_meta: meta.update(self._extra_filter_meta) - filterset_class = self._provided_filterset_class or ( - self.node_type._meta.filterset_class - ) + filterset_class = self._provided_filterset_class or (self.node_type._meta.filterset_class) self._filterset_class = get_filterset_class(filterset_class, **meta) return self._filterset_class @property def filtering_args(self): + import ipdb + + ipdb.set_trace() return get_filtering_args_from_filterset(self.filterset_class, self.node_type) @classmethod @@ -337,15 +330,11 @@ def resolve_queryset( permission_classes, ): filter_kwargs = {k: v for k, v in args.items() if k in filtering_args} - if not any( - perm.has_permission(info, **filter_kwargs) for perm in permission_classes - ): + if not 
any(perm.has_permission(info, **filter_kwargs) for perm in permission_classes): logger.error("Permission Denied") raise GraphQLError("Permission Denied") if "permissions" in filtering_args: - filter_kwargs[ - "permissions" - ] = info.context.user.permissions_in_business_area( + filter_kwargs["permissions"] = info.context.user.permissions_in_business_area( filter_kwargs.get("business_area") ) qs = super().resolve_queryset(connection, iterable, info, args) @@ -407,16 +396,8 @@ def has_creator_or_owner_permission( cls.is_authenticated(info) if not ( cls.has_permission(info, general_permission, business_area_arg, False) - or ( - is_creator - and cls.has_permission( - info, creator_permission, business_area_arg, False - ) - ) - or ( - is_owner - and cls.has_permission(info, owner_permission, business_area_arg, False) - ) + or (is_creator and cls.has_permission(info, creator_permission, business_area_arg, False)) + or (is_owner and cls.has_permission(info, owner_permission, business_area_arg, False)) ): return cls.raise_permission_denied_error(raise_error=raise_error) return True @@ -430,9 +411,7 @@ def raise_permission_denied_error(not_authenticated=False, raise_error=True): raise PermissionDenied("Permission Denied: User is not authenticated.") else: logger.error("Permission Denied: User does not have correct permission.") - raise PermissionDenied( - "Permission Denied: User does not have correct permission." - ) + raise PermissionDenied("Permission Denied: User does not have correct permission.") class PermissionMutation(BaseMutationPermissionMixin, Mutation): diff --git a/backend/hct_mis_api/apps/account/schema.py b/backend/hct_mis_api/apps/account/schema.py index ac44ac8ba8..a662e35340 100644 --- a/backend/hct_mis_api/apps/account/schema.py +++ b/backend/hct_mis_api/apps/account/schema.py @@ -47,7 +47,7 @@ def permissions_resolver(user_roles): class UsersFilter(FilterSet): business_area = CharFilter(required=True, method="business_area_filter") - search = CharFilter(method="search_filter", lookup_expr=["exact", "startswith"]) + search = CharFilter(method="search_filter") status = MultipleChoiceFilter(field_name="status", choices=USER_STATUS_CHOICES) partner = MultipleChoiceFilter(choices=Partner.get_partners_as_choices(), method="partners_filter") roles = MultipleChoiceFilter(choices=Role.get_roles_as_choices(), method="roles_filter") diff --git a/backend/hct_mis_api/apps/account/templates/admin/account/user/change_form.html b/backend/hct_mis_api/apps/account/templates/admin/account/user/change_form.html index 5c73557df9..e6ebc8089d 100644 --- a/backend/hct_mis_api/apps/account/templates/admin/account/user/change_form.html +++ b/backend/hct_mis_api/apps/account/templates/admin/account/user/change_form.html @@ -1,4 +1,5 @@ -{% extends "admin_extra_urls/change_form.html" %}{% load staticfiles accounts i18n %} +{% extends "admin_extra_urls/change_form.html" %} +{% load static accounts i18n %} {% block object-tools-items %} {{ block.super }} {% if request|is_root %} diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 55c2199333..28126986a9 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -38,7 +38,7 @@ class AdminAreaFilter(FilterSet): business_area = CharFilter( field_name="admin_area_level__country__business_areas__slug", ) - title = CharFilter(field_name="title", lookup_expr=["exact", "istartswith"]) + title__istartswith = CharFilter(field_name="title", lookup_expr="istartswith") level = 
IntegerFilter( field_name="level", ) @@ -47,6 +47,7 @@ class Meta: model = AdminArea fields = [ "title", + "title__istartswith", # "business_area": ["exact"], ] @@ -88,9 +89,7 @@ class Meta: model = FlexibleAttributeChoice interfaces = (relay.Node,) connection_class = ExtendedConnection - exclude_fields = [ - "history", - ] + exclude_fields = [] class FlexibleAttributeNode(DjangoObjectType): diff --git a/backend/hct_mis_api/apps/grievance/schema.py b/backend/hct_mis_api/apps/grievance/schema.py index 854b1f77c5..76cb62b5a4 100644 --- a/backend/hct_mis_api/apps/grievance/schema.py +++ b/backend/hct_mis_api/apps/grievance/schema.py @@ -135,8 +135,8 @@ class GrievanceTicketFilter(FilterSet): "payment_verifications__payment_record__service_provider", ), ) - id = NumberFilter(field_name="id", lookup_expr=["exact", "startswith"]) - area = CharFilter(field_name="area", lookup_expr=["exact", "startswith"]) + id__startswith = CharFilter(field_name="id", lookup_expr="startswith") + area__startswith = CharFilter(field_name="area", lookup_expr="startswith") business_area = CharFilter(field_name="business_area__slug", required=True) search = CharFilter(method="search_filter") status = TypedMultipleChoiceFilter(field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int) @@ -156,8 +156,10 @@ class GrievanceTicketFilter(FilterSet): class Meta: fields = [ "id", + "id__startswith", "category", "area", + "area__startswith", "assigned_to", "registration_data_import", ] diff --git a/backend/hct_mis_api/apps/household/admin.py b/backend/hct_mis_api/apps/household/admin.py index 6c8f3184ae..dd67da94e4 100644 --- a/backend/hct_mis_api/apps/household/admin.py +++ b/backend/hct_mis_api/apps/household/admin.py @@ -27,6 +27,7 @@ RelatedFieldComboFilter, TextFieldFilter, ) +from adminfilters.lookup import GenericLookupFieldFilter from advanced_filters.admin import AdminAdvancedFiltersMixin from jsoneditor.forms import JSONEditor from smart_admin.mixins import FieldsetMixin as SmartFieldsetMixin @@ -359,8 +360,8 @@ class IndividualAdmin( exclude = ("created_at", "updated_at") inlines = [IndividualRoleInHouseholdInline] list_filter = ( - ("unicef_id__iexact", TextFieldFilter.factory(title="UNICEF ID")), - ("household__unicef_id__iexact", TextFieldFilter.factory(title="Household ID")), + GenericLookupFieldFilter.factory(title="UNICEF ID", lookup="unicef_id__iexact"), + GenericLookupFieldFilter.factory(title="Household ID", lookup="household__unicef_id__iexact"), ("deduplication_golden_record_status", ChoicesFieldComboFilter), ("deduplication_batch_status", ChoicesFieldComboFilter), ("business_area", AutoCompleteFilter), diff --git a/backend/hct_mis_api/apps/household/schema.py b/backend/hct_mis_api/apps/household/schema.py index 0292d79d5c..2ffe6239e0 100644 --- a/backend/hct_mis_api/apps/household/schema.py +++ b/backend/hct_mis_api/apps/household/schema.py @@ -1,12 +1,13 @@ import re -from django.db.models import Prefetch, Q, Sum +from django.db.models import IntegerField, Prefetch, Q, Sum from django.db.models.functions import Coalesce, Lower import graphene from django_filters import ( BooleanFilter, CharFilter, + DateFilter, FilterSet, ModelMultipleChoiceFilter, MultipleChoiceFilter, @@ -105,12 +106,13 @@ class HouseholdFilter(FilterSet): business_area = CharFilter(field_name="business_area__slug") - country_origin = CharFilter(field_name="country_origin", lookup_expr=["exact", "startswith"]) - head_of_household__full_name = CharFilter( - field_name="head_of_household__full_name", lookup_expr=["exact", 
"startswith"] - ) size = IntegerRangeFilter(field_name="size") search = CharFilter(method="search_filter") + country_origin__startswith = CharFilter(field_name="country_origin", lookup_expr="startswith") + head_of_household__full_name__startswith = CharFilter( + field_name="head_of_household__full_name", lookup_expr="startswith" + ) + search = CharFilter(method="search_filter") last_registration_date = DateRangeFilter(field_name="last_registration_date") admin2 = ModelMultipleChoiceFilter(field_name="admin_area", queryset=AdminArea.objects.filter(level=2)) withdrawn = BooleanFilter(field_name="withdrawn") @@ -120,8 +122,10 @@ class Meta: fields = [ "business_area", "country_origin", + "country_origin__startswith", "address", "head_of_household__full_name", + "head_of_household__full_name__startswith", "size", "admin_area", "target_populations", @@ -171,7 +175,10 @@ class IndividualFilter(FilterSet): business_area = CharFilter( field_name="business_area__slug", ) - age = AgeRangeFilter(field_name="birth_date", lookup_expr=["range", "lte", "gte"]) + age = DateFilter(field_name="birth_date__date") + age__range = AgeRangeFilter(field_name="birth_date__date", lookup_expr="range") + age__lte = DateRangeFilter(field_name="birth_date__date", lookup_expr="lte") + age__gte = DateRangeFilter(field_name="birth_date__date", lookup_expr="gte") sex = MultipleChoiceFilter(field_name="sex", choices=SEX_CHOICE) programs = ModelMultipleChoiceFilter(field_name="household__programs", queryset=Program.objects.all()) search = CharFilter(method="search_filter") @@ -179,8 +186,9 @@ class IndividualFilter(FilterSet): admin2 = ModelMultipleChoiceFilter(field_name="household__admin_area", queryset=AdminArea.objects.filter(level=2)) status = MultipleChoiceFilter(choices=INDIVIDUAL_STATUS_CHOICES, method="status_filter") excluded_id = CharFilter(method="filter_excluded_id") + full_name__startswith = CharFilter(field_name="full_name", lookup_expr="startswith") + full_name__endswith = CharFilter(field_name="full_name", lookup_expr="endswith") withdrawn = BooleanFilter(field_name="withdrawn") - full_name = CharFilter(field_name="full_name", lookup_expr=["exact", "startswith", "endswith"]) flags = MultipleChoiceFilter(choices=INDIVIDUAL_FLAGS_CHOICES, method="flags_filter") class Meta: @@ -190,7 +198,12 @@ class Meta: "programs", "business_area", "full_name", + "full_name__startswith", + "full_name__endswith", "age", + "age__range", + "age__lte", + "age__gte", "sex", "household__admin_area", "withdrawn", @@ -655,9 +668,9 @@ def resolve_all_individuals_flex_fields_attributes(self, info, **kwargs): ).order_by("created_at") def resolve_all_households(self, info, **kwargs): - return Household.objects.annotate(total_cash=Coalesce(Sum("payment_records__delivered_quantity"), 0)).order_by( - "created_at" - ) + return Household.objects.annotate( + total_cash=Coalesce(Sum("payment_records__delivered_quantity"), 0, output_field=IntegerField()) + ).order_by("created_at") def resolve_residence_status_choices(self, info, **kwargs): return to_choice_object(RESIDENCE_STATUS_CHOICE) diff --git a/backend/hct_mis_api/apps/program/schema.py b/backend/hct_mis_api/apps/program/schema.py index 344c775579..6931a346a5 100644 --- a/backend/hct_mis_api/apps/program/schema.py +++ b/backend/hct_mis_api/apps/program/schema.py @@ -1,4 +1,14 @@ -from django.db.models import Case, Count, IntegerField, Q, Sum, Value, When +from django.db.models import ( + Case, + Count, + DecimalField, + FloatField, + IntegerField, + Q, + Sum, + Value, + When, +) from 
django.db.models.functions import Coalesce, Lower import graphene @@ -116,22 +126,32 @@ class CashPlanFilter(FilterSet): verification_status = MultipleChoiceFilter( field_name="verification_status", choices=CashPlanPaymentVerification.STATUS_CHOICES ) - assistance_through = CharFilter(field_name="assistance_through", lookup_expr=["exact", "startswith"]) - service_provider__full_name = CharFilter( - field_name="service_provider__full_name", lookup_expr=["exact", "startswith"] + assistance_through__startswith = CharFilter(field_name="assistance_through", lookup_expr="startswith") + service_provider__full_name__startswith = CharFilter( + field_name="service_provider__full_name", lookup_expr="startswith" ) - end_date = DateFilter(field_name="end_date", lookup_expr=["exact", "lte", "gte"]) - start_date = DateFilter(field_name="start_date", lookup_expr=["exact", "lte", "gte"]) - business_area = CharFilter(field_name="business_area__slug", lookup_expr=["exact", "startswith"]) + end_date__lte = DateFilter(field_name="end_date", lookup_expr="lte") + end_date__gte = DateFilter(field_name="end_date", lookup_expr="gte") + start_date__lte = DateFilter(field_name="start_date", lookup_expr="lte") + start_date__gte = DateFilter(field_name="start_date", lookup_expr="gte") + business_area__slug__startswith = CharFilter(field_name="business_area__slug", lookup_expr="startswith") class Meta: fields = ( "program", "assistance_through", + "assistance_through__startswith", "service_provider__full_name", + "service_provider__full_name__startswith", "start_date", + "start_date__lte", + "start_date__gte", "end_date", + "end_date__lte", + "end_date__gte", "business_area", + "business_area__slug", + "business_area__slug__startswith", ) model = CashPlan @@ -260,7 +280,9 @@ def resolve_all_programs(self, info, **kwargs): output_field=IntegerField(), ) ) - .annotate(households_count=Coalesce(Sum("cash_plans__total_persons_covered"), 0)) + .annotate( + households_count=Coalesce(Sum("cash_plans__total_persons_covered"), 0, output_field=IntegerField()) + ) .order_by("custom_order", "start_date") ) @@ -331,12 +353,16 @@ def resolve_chart_total_transferred_by_month(self, info, business_area_slug, yea .order_by("delivery_date__month") .annotate( total_delivered_cash=Sum( - "delivered_quantity_usd", filter=Q(delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH) + "delivered_quantity_usd", + filter=Q(delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH), + output_field=DecimalField(), ) ) .annotate( total_delivered_voucher=Sum( - "delivered_quantity_usd", filter=Q(delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER) + "delivered_quantity_usd", + filter=Q(delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER), + output_field=DecimalField(), ) ) ) diff --git a/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py b/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py index 86814241d5..74894e30b5 100644 --- a/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py +++ b/backend/hct_mis_api/apps/program/tests/test_cash_plan_queries.py @@ -139,7 +139,6 @@ def setUp(self): ) def test_cash_plans(self, name, permissions, query): self.create_user_role_with_permissions(self.user, permissions, self.business_area) - variables = {} if "single" in name: variables["id"] = self.id_to_base64("c7e768f1-5626-413e-a032-5fb18789f985", "CashPlanNode") diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py index 6ce0a8d63e..1d3bac6974 
100644 --- a/backend/hct_mis_api/apps/registration_data/schema.py +++ b/backend/hct_mis_api/apps/registration_data/schema.py @@ -26,11 +26,11 @@ class RegistrationDataImportFilter(FilterSet): import_date = DateFilter(field_name="import_date__date") business_area = CharFilter(field_name="business_area__slug") - name = CharFilter(field_name="name", lookup_expr=["exact", "startswith"]) + name__startswith = CharFilter(field_name="name", lookup_expr="startswith") class Meta: model = RegistrationDataImport - fields = ["imported_by__id", "import_date", "status", "name", "business_area"] + fields = ["imported_by__id", "import_date", "status", "name", "name__startswith", "business_area"] order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/registration_datahub/admin.py b/backend/hct_mis_api/apps/registration_datahub/admin.py index b6fcfca0a7..e4f91a3ae9 100644 --- a/backend/hct_mis_api/apps/registration_datahub/admin.py +++ b/backend/hct_mis_api/apps/registration_datahub/admin.py @@ -11,6 +11,7 @@ from adminactions.helpers import AdminActionPermMixin from adminfilters.autocomplete import AutoCompleteFilter from adminfilters.filters import ChoicesFieldComboFilter, NumberFilter, TextFieldFilter +from adminfilters.lookup import GenericLookupFieldFilter from advanced_filters.admin import AdminAdvancedFiltersMixin from hct_mis_api.apps.registration_datahub.models import ( @@ -162,13 +163,16 @@ class ImportedIndividualAdmin(ExtraUrlMixin, HOPEModelAdminBase): list_filter = ( ("deduplication_batch_results", ScoreFilter), ("deduplication_golden_record_results", ScoreFilter), - ("registration_data_import__name__istartswith", TextFieldFilter.factory(title="Registration data import")), - ("individual_id__istartswith", TextFieldFilter.factory(title="Individual Id")), + GenericLookupFieldFilter.factory( + "registration_data_import__name__istartswith", title="Registration data import name stat with" + ), + ("individual_id", TextFieldFilter), "deduplication_batch_status", "deduplication_golden_record_status", ) date_hierarchy = "updated_at" - raw_id_fields = ("household", "registration_data_import") + # raw_id_fields = ("household", "registration_data_import") + autocomplete_fields = ("household", "registration_data_import") actions = ["enrich_deduplication"] def score(self, obj): @@ -217,14 +221,17 @@ class ImportedIndividualIdentityAdmin(HOPEModelAdminBase): @admin.register(ImportedHousehold) class ImportedHouseholdAdmin(HOPEModelAdminBase): + search_fields = ("id", "registration_data_import") list_display = ("registration_data_import", "registration_method", "name_enumerator", "country", "country_origin") raw_id_fields = ("registration_data_import", "head_of_household") date_hierarchy = "registration_data_import__import_date" list_filter = ( ("country", ChoicesFieldComboFilter), ("country_origin", ChoicesFieldComboFilter), - ("registration_data_import__name__istartswith", TextFieldFilter.factory(title="Registration Data Import Name")), - ("kobo_submission_uuid__istartswith", TextFieldFilter.factory(title="Kobo Submission UUID")), + GenericLookupFieldFilter.factory( + title="Registration Data Import Name", lookup="registration_data_import__name__istartswith" + ), + GenericLookupFieldFilter.factory(title="Kobo Submission UUID", lookup="kobo_submission_uuid__istartswith"), ) @@ -244,6 +251,7 @@ class ImportedDocumentTypeAdmin(HOPEModelAdminBase): class ImportedDocumentAdmin(HOPEModelAdminBase): list_display = ("document_number", "type", "individual") raw_id_fields = ("individual", 
"type") + list_filter = (("type", AutoCompleteFilter),) @admin.register(ImportedIndividualRoleInHousehold) diff --git a/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py b/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py index c5ef9042bb..e2cae63a83 100644 --- a/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py +++ b/backend/hct_mis_api/apps/reporting/generate_dashboard_report_service.py @@ -8,7 +8,7 @@ from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import EmailMultiAlternatives -from django.db.models import Count, F, Q, Sum +from django.db.models import Count, DecimalField, F, Q, Sum from django.template.loader import render_to_string from django.urls import reverse @@ -59,29 +59,17 @@ def get_beneficiaries(cls, report: DashboardReport): valid_households, individual_count_fields, ) - instance["total_children"] = cls._reduce_aggregate( - households_aggr, children_count_fields - ) - instance["total_individuals"] = cls._reduce_aggregate( - households_aggr, individual_count_fields - ) + instance["total_children"] = cls._reduce_aggregate(households_aggr, children_count_fields) + instance["total_individuals"] = cls._reduce_aggregate(households_aggr, individual_count_fields) instance["num_households"] = valid_households.count() # get total distincts (can't use the sum of column since some households might belong to multiple programs) - households = Household.objects.filter( - payment_records__in=valid_payment_records - ).distinct() - households_aggr = cls._aggregate_instances_sum( - households, individual_count_fields - ) + households = Household.objects.filter(payment_records__in=valid_payment_records).distinct() + households_aggr = cls._aggregate_instances_sum(households, individual_count_fields) totals = { "num_households": households.count(), - "total_individuals": cls._reduce_aggregate( - households_aggr, individual_count_fields - ), - "total_children": cls._reduce_aggregate( - households_aggr, children_count_fields - ), + "total_individuals": cls._reduce_aggregate(households_aggr, individual_count_fields), + "total_children": cls._reduce_aggregate(households_aggr, children_count_fields), } # return instances for rows and totals row info return instances, totals @@ -101,18 +89,14 @@ def get_individuals(cls, report: DashboardReport): **{valid_payment_records_in_instance_filter_key: instance["id"]} ) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter( - payment_records__in=valid_payment_records_in_instance - ).distinct(), + Household.objects.filter(payment_records__in=valid_payment_records_in_instance).distinct(), individual_count_fields, ) instance.update(households_aggr) # get total distincts (can't use the sum of column since some households might belong to multiple programs) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter( - payment_records__in=valid_payment_records - ).distinct(), + Household.objects.filter(payment_records__in=valid_payment_records).distinct(), individual_count_fields, ) # return instances for rows and totals row info @@ -133,7 +117,7 @@ def aggregate_by_delivery_type(payment_records): value = delivery_type[0] result[value] = ( payment_records.filter(delivery_type=value) - .aggregate(Sum("delivered_quantity_usd")) + .aggregate(Sum("delivered_quantity_usd", output_field=DecimalField())) .get("delivered_quantity_usd__sum") ) return result @@ -142,9 +126,7 @@ def 
aggregate_by_delivery_type(payment_records): valid_payment_records_in_instance = valid_payment_records.filter( **{valid_payment_records_in_instance_filter_key: instance["id"]} ) - aggregated_by_delivery_type = aggregate_by_delivery_type( - valid_payment_records_in_instance - ) + aggregated_by_delivery_type = aggregate_by_delivery_type(valid_payment_records_in_instance) instance.update(aggregated_by_delivery_type) totals = aggregate_by_delivery_type(valid_payment_records) @@ -182,6 +164,7 @@ def get_annotation(index_number: int, cash=True): label: Sum( "cash_plans__payment_records__delivered_quantity_usd", filter=get_filter_query(cash, index_number + 1), + output_field=DecimalField(), ) } @@ -231,23 +214,13 @@ def get_grievances(cls, report: DashboardReport): instances = ( BusinessArea.objects.filter(tickets__in=valid_grievances) .distinct() - .annotate( - total_grievances=Count( - "tickets", filter=~Q(tickets__category__in=feedback_categories) - ) - ) - .annotate( - total_feedback=Count( - "tickets", filter=Q(tickets__category__in=feedback_categories) - ) - ) + .annotate(total_grievances=Count("tickets", filter=~Q(tickets__category__in=feedback_categories))) + .annotate(total_feedback=Count("tickets", filter=Q(tickets__category__in=feedback_categories))) .annotate(total_resolved=Count("tickets", filter=status_closed_query)) .annotate( total_unresolved_lte_30=Count( "tickets", - filter=Q( - status_open_query, tickets__created_at__gte=days_30_from_now - ), + filter=Q(status_open_query, tickets__created_at__gte=days_30_from_now), ) ) .annotate( @@ -263,9 +236,7 @@ def get_grievances(cls, report: DashboardReport): .annotate( total_unresolved_60=Count( "tickets", - filter=Q( - status_open_query, tickets__created_at__lt=days_60_from_now - ), + filter=Q(status_open_query, tickets__created_at__lt=days_60_from_now), ) ) .annotate( @@ -301,23 +272,15 @@ def get_payment_verifications(cls, report: DashboardReport): if not cls._is_report_global(report): filter_vars["payment_record__business_area"] = report.business_area valid_verifications = PaymentVerification.objects.filter(**filter_vars) - path_to_payment_record_verifications = ( - "cash_plans__verifications__payment_record_verifications" - ) + path_to_payment_record_verifications = "cash_plans__verifications__payment_record_verifications" def format_status_filter(status): return Q(**{f"{path_to_payment_record_verifications}__status": status}) programs = ( - Program.objects.filter( - **{f"{path_to_payment_record_verifications}__in": valid_verifications} - ) + Program.objects.filter(**{f"{path_to_payment_record_verifications}__in": valid_verifications}) .distinct() - .annotate( - total_cash_plan_verifications=Count( - "cash_plans__verifications", distinct=True - ) - ) + .annotate(total_cash_plan_verifications=Count("cash_plans__verifications", distinct=True)) .annotate( total_households=Count( f"{path_to_payment_record_verifications}__payment_record__household", @@ -341,11 +304,7 @@ def format_status_filter(status): ), ) ) - .annotate( - total_verifications_done=Count( - path_to_payment_record_verifications, distinct=True - ) - ) + .annotate(total_verifications_done=Count(path_to_payment_record_verifications, distinct=True)) .annotate( received=Count( path_to_payment_record_verifications, @@ -356,18 +315,14 @@ def format_status_filter(status): .annotate( not_received=Count( path_to_payment_record_verifications, - filter=format_status_filter( - PaymentVerification.STATUS_NOT_RECEIVED - ), + 
filter=format_status_filter(PaymentVerification.STATUS_NOT_RECEIVED), distinct=True, ) ) .annotate( received_with_issues=Count( path_to_payment_record_verifications, - filter=format_status_filter( - PaymentVerification.STATUS_RECEIVED_WITH_ISSUES - ), + filter=format_status_filter(PaymentVerification.STATUS_RECEIVED_WITH_ISSUES), distinct=True, ) ) @@ -393,17 +348,15 @@ def get_total_transferred_by_country(cls, report: DashboardReport): .annotate( total_cash=Sum( "paymentrecord__delivered_quantity_usd", - filter=Q( - paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH - ), + filter=Q(paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_CASH), + output_field=DecimalField(), ) ) .annotate( total_voucher=Sum( "paymentrecord__delivered_quantity_usd", - filter=Q( - paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER - ), + filter=Q(paymentrecord__delivery_type__in=PaymentRecord.DELIVERY_TYPES_IN_VOUCHER), + output_field=DecimalField(), ) ) ) @@ -422,38 +375,26 @@ def get_total_transferred_by_admin_area(cls, report: DashboardReport): household__payment_records__in=valid_payment_records, ) .distinct() - .annotate( - total_transferred=Sum( - "household__payment_records__delivered_quantity_usd" - ) - ) + .annotate(total_transferred=Sum("household__payment_records__delivered_quantity_usd")) .annotate(num_households=Count("household", distinct=True)) ) totals = admin_areas.aggregate(Sum("total_transferred"), Sum("num_households")) - admin_areas = admin_areas.values( - "id", "title", "p_code", "num_households", "total_transferred" - ) + admin_areas = admin_areas.values("id", "title", "p_code", "num_households", "total_transferred") individual_count_fields = cls._get_all_individual_count_fields() for admin_area in admin_areas: - valid_payment_records_in_instance = valid_payment_records.filter( - household__admin_area=admin_area["id"] - ) + valid_payment_records_in_instance = valid_payment_records.filter(household__admin_area=admin_area["id"]) households_aggr = cls._aggregate_instances_sum( - Household.objects.filter( - payment_records__in=valid_payment_records_in_instance - ).distinct(), + Household.objects.filter(payment_records__in=valid_payment_records_in_instance).distinct(), individual_count_fields, ) admin_area.update(households_aggr) totals.update( cls._aggregate_instances_sum( - Household.objects.filter( - payment_records__in=valid_payment_records - ).distinct(), + Household.objects.filter(payment_records__in=valid_payment_records).distinct(), individual_count_fields, ) ) @@ -513,9 +454,7 @@ def format_programs_row(cls, instance: Program, *args) -> tuple: return result @staticmethod - def format_total_transferred_by_country( - instance: BusinessArea, is_totals: bool, *args - ) -> tuple: + def format_total_transferred_by_country(instance: BusinessArea, is_totals: bool, *args) -> tuple: if is_totals: return ( "", @@ -571,13 +510,7 @@ def format_payment_verifications_row(instance: Program, *args): instance.name, instance.total_cash_plan_verifications, instance.total_households, - round( - ( - instance.total_verifications_done - / instance.all_possible_payment_records - ) - * 100 - ) + round((instance.total_verifications_done / instance.all_possible_payment_records) * 100) if instance.total_payment_records else 0, instance.received, @@ -587,14 +520,10 @@ def format_payment_verifications_row(instance: Program, *args): ) @classmethod - def format_total_transferred_by_admin_area_row( - cls, instance, is_totals: bool, *args - ): + def 
format_total_transferred_by_admin_area_row(cls, instance, is_totals: bool, *args): fields_list = cls._get_all_individual_count_fields() - shared_cells = tuple( - instance.get(f"{field_name}__sum", 0) for field_name in fields_list - ) + shared_cells = tuple(instance.get(f"{field_name}__sum", 0) for field_name in fields_list) if is_totals: return ( @@ -629,9 +558,7 @@ def _format_filters( if date_path: filter_vars.update({f"{date_path}__year": report.year}) if admin_area_path and report.admin_area: - filter_vars.update( - {admin_area_path: report.admin_area, f"{admin_area_path}__level": 2} - ) + filter_vars.update({admin_area_path: report.admin_area, f"{admin_area_path}__level": 2}) if program_path and report.program: filter_vars.update({program_path: report.program}) if not cls._is_report_global(report) and business_area_path: @@ -658,15 +585,11 @@ def _get_payment_records_for_report(self, report): def _get_business_areas_or_programs(cls, report, valid_payment_records): if cls._is_report_global(report): business_area_code_path = "code" - instances = BusinessArea.objects.filter( - paymentrecord__in=valid_payment_records - ) + instances = BusinessArea.objects.filter(paymentrecord__in=valid_payment_records) valid_payment_records_in_instance_filter_key = "business_area" else: business_area_code_path = "business_area__code" - instances = Program.objects.filter( - cash_plans__payment_records__in=valid_payment_records - ) + instances = Program.objects.filter(cash_plans__payment_records__in=valid_payment_records) valid_payment_records_in_instance_filter_key = "cash_plan__program" instances = ( @@ -939,9 +862,7 @@ def __init__(self, report: DashboardReport): self.report = report self.report_types = report.report_type self.business_area = report.business_area - self.hq_or_country = ( - self.HQ if report.business_area.slug == "global" else self.COUNTRY - ) + self.hq_or_country = self.HQ if report.business_area.slug == "global" else self.COUNTRY def _create_workbook(self) -> openpyxl.Workbook: wb = openpyxl.Workbook() @@ -963,10 +884,7 @@ def _format_meta_tab(self): self.ws_meta.append(info_row) def _add_headers(self, active_sheet, report_type) -> int: - headers_row = ( - self.HEADERS[report_type][self.hq_or_country] - + self.HEADERS[report_type][self.SHARED] - ) + headers_row = self.HEADERS[report_type][self.hq_or_country] + self.HEADERS[report_type][self.SHARED] headers_row = self._stringify_all_values(headers_row) active_sheet.append(headers_row) return len(headers_row) @@ -1016,9 +934,7 @@ def generate_report(self): try: self.generate_workbook() file_name = ( - self._report_type_to_str(self.report_types[0]) - if len(self.report_types) == 1 - else "Multiple reports" + self._report_type_to_str(self.report_types[0]) if len(self.report_types) == 1 else "Multiple reports" ) self.report.file.save( f"{file_name}-{self._format_date(self.report.created_at)}.xlsx", @@ -1056,9 +972,7 @@ def _send_email(self): @staticmethod def _adjust_column_width_from_col(ws, min_col, max_col, min_row): column_widths = [] - for i, col in enumerate( - ws.iter_cols(min_col=min_col, max_col=max_col, min_row=min_row) - ): + for i, col in enumerate(ws.iter_cols(min_col=min_col, max_col=max_col, min_row=min_row)): for cell in col: value = cell.value if value is not None: @@ -1098,9 +1012,7 @@ def _report_type_to_str(report_type) -> str: return label[:31] def _report_types_to_joined_str(self) -> str: - return ", ".join( - [self._report_type_to_str(report_type) for report_type in self.report_types] - ) + return ", 
".join([self._report_type_to_str(report_type) for report_type in self.report_types]) @staticmethod def _stringify_all_values(row: tuple) -> tuple: @@ -1119,9 +1031,7 @@ def _format_date(date) -> str: @staticmethod def _format_user_name(user: User) -> str: return ( - f"{user.first_name} {user.last_name}" - if user.first_name or user.last_name - else user.email or user.username + f"{user.first_name} {user.last_name}" if user.first_name or user.last_name else user.email or user.username ) @staticmethod diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py index 61e7d3e306..79b5d8863a 100644 --- a/backend/hct_mis_api/apps/sanction_list/schema.py +++ b/backend/hct_mis_api/apps/sanction_list/schema.py @@ -19,10 +19,10 @@ class SanctionListIndividualFilter(FilterSet): - full_name = CharFilter(field_name="full_name", lookup_expr=["exact", "startswith"]) + full_name__startswith = CharFilter(field_name="full_name", lookup_expr="startswith") class Meta: - fields = ("id", "full_name", "reference_number") + fields = ("id", "full_name", "full_name__startswith", "reference_number") model = SanctionListIndividual order_by = CustomOrderingFilter( From 16836142da4893469eb854774eaa661ffbabaa4c Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 11 Feb 2022 20:01:22 +0000 Subject: [PATCH 10/24] Added migrations for BigInt, JsonField ... required by Django 3.2 --- backend/hct_mis_api/apps/account/fixtures.py | 2 +- .../apps/account/migrations/0035_migration.py | 28 +++++++++ .../hct_mis_api/apps/account/permissions.py | 3 - .../activity_log/migrations/0004_migration.py | 23 +++++++ .../migrations/0015_migration.py | 43 +++++++++++++ .../apps/core/migrations/0044_migration.py | 43 +++++++++++++ backend/hct_mis_api/apps/core/schema.py | 2 - .../erp_datahub/migrations/0017_migration.py | 23 +++++++ .../apps/geo/migrations/0007_migration.py | 28 +++++++++ .../grievance/migrations/0035_migration.py | 58 +++++++++++++++++ .../household/migrations/0094_migration.py | 53 ++++++++++++++++ backend/hct_mis_api/apps/household/schema.py | 20 +----- .../mis_datahub/migrations/0037_migration.py | 63 +++++++++++++++++++ .../tests/test_data_send_tp_to_datahub.py | 10 ++- ...t_external_collector_send_tp_to_datahub.py | 6 +- .../apps/payment/migrations/0031_migration.py | 23 +++++++ .../power_query/migrations/0003_migration.py | 53 ++++++++++++++++ .../apps/power_query/tests/test_views.py | 2 +- .../apps/registration_data/schema.py | 3 +- .../migrations/0050_migration.py | 48 ++++++++++++++ .../steficon/migrations/0014_migration.py | 38 +++++++++++ .../targeting/migrations/0031_migration.py | 23 +++++++ backend/hct_mis_api/settings/base.py | 2 +- backend/hct_mis_api/settings/dev.py | 6 +- backend/hct_mis_api/settings/staging.py | 22 ++----- backend/hct_mis_api/settings/test.py | 6 +- 26 files changed, 569 insertions(+), 62 deletions(-) create mode 100644 backend/hct_mis_api/apps/account/migrations/0035_migration.py create mode 100644 backend/hct_mis_api/apps/activity_log/migrations/0004_migration.py create mode 100644 backend/hct_mis_api/apps/cash_assist_datahub/migrations/0015_migration.py create mode 100644 backend/hct_mis_api/apps/core/migrations/0044_migration.py create mode 100644 backend/hct_mis_api/apps/erp_datahub/migrations/0017_migration.py create mode 100644 backend/hct_mis_api/apps/geo/migrations/0007_migration.py create mode 100644 backend/hct_mis_api/apps/grievance/migrations/0035_migration.py create mode 100644 
backend/hct_mis_api/apps/household/migrations/0094_migration.py create mode 100644 backend/hct_mis_api/apps/mis_datahub/migrations/0037_migration.py create mode 100644 backend/hct_mis_api/apps/payment/migrations/0031_migration.py create mode 100644 backend/hct_mis_api/apps/power_query/migrations/0003_migration.py create mode 100644 backend/hct_mis_api/apps/registration_datahub/migrations/0050_migration.py create mode 100644 backend/hct_mis_api/apps/steficon/migrations/0014_migration.py create mode 100644 backend/hct_mis_api/apps/targeting/migrations/0031_migration.py diff --git a/backend/hct_mis_api/apps/account/fixtures.py b/backend/hct_mis_api/apps/account/fixtures.py index a98a3f2cb5..bdc7cac81c 100644 --- a/backend/hct_mis_api/apps/account/fixtures.py +++ b/backend/hct_mis_api/apps/account/fixtures.py @@ -17,7 +17,7 @@ class Meta: class BusinessAreaFactory(factory.DjangoModelFactory): - name = factory.Sequence(lambda x: "BusinessArea%s" % x) + name = factory.Sequence(lambda x: "BusinessArea{}".format(x)) class Meta: model = BusinessArea diff --git a/backend/hct_mis_api/apps/account/migrations/0035_migration.py b/backend/hct_mis_api/apps/account/migrations/0035_migration.py new file mode 100644 index 0000000000..a25a16e749 --- /dev/null +++ b/backend/hct_mis_api/apps/account/migrations/0035_migration.py @@ -0,0 +1,28 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('account', '0034_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='partner', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='user', + name='custom_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='user', + name='first_name', + field=models.CharField(blank=True, max_length=150, verbose_name='first name'), + ), + ] diff --git a/backend/hct_mis_api/apps/account/permissions.py b/backend/hct_mis_api/apps/account/permissions.py index e49458583b..f71da6c0dd 100644 --- a/backend/hct_mis_api/apps/account/permissions.py +++ b/backend/hct_mis_api/apps/account/permissions.py @@ -313,9 +313,6 @@ def filterset_class(self): @property def filtering_args(self): - import ipdb - - ipdb.set_trace() return get_filtering_args_from_filterset(self.filterset_class, self.node_type) @classmethod diff --git a/backend/hct_mis_api/apps/activity_log/migrations/0004_migration.py b/backend/hct_mis_api/apps/activity_log/migrations/0004_migration.py new file mode 100644 index 0000000000..62d7d47279 --- /dev/null +++ b/backend/hct_mis_api/apps/activity_log/migrations/0004_migration.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('activity_log', '0003_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='logentry', + name='changes', + field=models.JSONField(null=True, verbose_name='change message'), + ), + migrations.AlterField( + model_name='logentry', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/cash_assist_datahub/migrations/0015_migration.py b/backend/hct_mis_api/apps/cash_assist_datahub/migrations/0015_migration.py new file mode 100644 index 0000000000..5f8a6fe672 --- /dev/null +++ 
b/backend/hct_mis_api/apps/cash_assist_datahub/migrations/0015_migration.py @@ -0,0 +1,43 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('cash_assist_datahub', '0014_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='cashplan', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='paymentrecord', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='programme', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='serviceprovider', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='session', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='targetpopulation', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/core/migrations/0044_migration.py b/backend/hct_mis_api/apps/core/migrations/0044_migration.py new file mode 100644 index 0000000000..a438348c87 --- /dev/null +++ b/backend/hct_mis_api/apps/core/migrations/0044_migration.py @@ -0,0 +1,43 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0043_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='businessarea', + name='custom_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='countrycodemap', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='flexibleattribute', + name='hint', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='flexibleattribute', + name='label', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='flexibleattributechoice', + name='label', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='flexibleattributegroup', + name='label', + field=models.JSONField(default=dict), + ), + ] diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 28126986a9..09bf6c9787 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -38,7 +38,6 @@ class AdminAreaFilter(FilterSet): business_area = CharFilter( field_name="admin_area_level__country__business_areas__slug", ) - title__istartswith = CharFilter(field_name="title", lookup_expr="istartswith") level = IntegerFilter( field_name="level", ) @@ -47,7 +46,6 @@ class Meta: model = AdminArea fields = [ "title", - "title__istartswith", # "business_area": ["exact"], ] diff --git a/backend/hct_mis_api/apps/erp_datahub/migrations/0017_migration.py b/backend/hct_mis_api/apps/erp_datahub/migrations/0017_migration.py new file mode 100644 index 0000000000..a40ea61126 --- /dev/null +++ b/backend/hct_mis_api/apps/erp_datahub/migrations/0017_migration.py 
@@ -0,0 +1,23 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('erp_datahub', '0016_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='downpayment', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='fundscommitment', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/geo/migrations/0007_migration.py b/backend/hct_mis_api/apps/geo/migrations/0007_migration.py new file mode 100644 index 0000000000..54fbd29d18 --- /dev/null +++ b/backend/hct_mis_api/apps/geo/migrations/0007_migration.py @@ -0,0 +1,28 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('geo', '0006_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='area', + name='extras', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='areatype', + name='extras', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='country', + name='extras', + field=models.JSONField(blank=True, default=dict), + ), + ] diff --git a/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py b/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py new file mode 100644 index 0000000000..a18cbd5577 --- /dev/null +++ b/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py @@ -0,0 +1,58 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('grievance', '0034_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='grievanceticket', + name='extras', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='ticketaddindividualdetails', + name='individual_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketdeleteindividualdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='tickethouseholddataupdatedetails', + name='household_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketindividualdataupdatedetails', + name='individual_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketindividualdataupdatedetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketneedsadjudicationdetails', + name='extra_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketneedsadjudicationdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketsystemflaggingdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + ] diff --git a/backend/hct_mis_api/apps/household/migrations/0094_migration.py b/backend/hct_mis_api/apps/household/migrations/0094_migration.py new file mode 100644 index 0000000000..a388d1bfac --- /dev/null +++ b/backend/hct_mis_api/apps/household/migrations/0094_migration.py @@ -0,0 +1,53 @@ +# Generated by Django 3.2 on 
2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('household', '0093_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='agency', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='household', + name='flex_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='household', + name='user_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='individual', + name='deduplication_batch_results', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='individual', + name='deduplication_golden_record_results', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='individual', + name='flex_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='individual', + name='user_fields', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='individualidentity', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/household/schema.py b/backend/hct_mis_api/apps/household/schema.py index 2ffe6239e0..cec272b604 100644 --- a/backend/hct_mis_api/apps/household/schema.py +++ b/backend/hct_mis_api/apps/household/schema.py @@ -108,11 +108,7 @@ class HouseholdFilter(FilterSet): business_area = CharFilter(field_name="business_area__slug") size = IntegerRangeFilter(field_name="size") search = CharFilter(method="search_filter") - country_origin__startswith = CharFilter(field_name="country_origin", lookup_expr="startswith") - head_of_household__full_name__startswith = CharFilter( - field_name="head_of_household__full_name", lookup_expr="startswith" - ) - search = CharFilter(method="search_filter") + head_of_household__full_name = CharFilter(field_name="head_of_household__full_name", lookup_expr="startswith") last_registration_date = DateRangeFilter(field_name="last_registration_date") admin2 = ModelMultipleChoiceFilter(field_name="admin_area", queryset=AdminArea.objects.filter(level=2)) withdrawn = BooleanFilter(field_name="withdrawn") @@ -122,10 +118,8 @@ class Meta: fields = [ "business_area", "country_origin", - "country_origin__startswith", "address", "head_of_household__full_name", - "head_of_household__full_name__startswith", "size", "admin_area", "target_populations", @@ -175,10 +169,7 @@ class IndividualFilter(FilterSet): business_area = CharFilter( field_name="business_area__slug", ) - age = DateFilter(field_name="birth_date__date") - age__range = AgeRangeFilter(field_name="birth_date__date", lookup_expr="range") - age__lte = DateRangeFilter(field_name="birth_date__date", lookup_expr="lte") - age__gte = DateRangeFilter(field_name="birth_date__date", lookup_expr="gte") + age = AgeRangeFilter(field_name="birth_date__date") sex = MultipleChoiceFilter(field_name="sex", choices=SEX_CHOICE) programs = ModelMultipleChoiceFilter(field_name="household__programs", queryset=Program.objects.all()) search = CharFilter(method="search_filter") @@ -186,8 +177,6 @@ class IndividualFilter(FilterSet): admin2 = ModelMultipleChoiceFilter(field_name="household__admin_area", queryset=AdminArea.objects.filter(level=2)) status = 
MultipleChoiceFilter(choices=INDIVIDUAL_STATUS_CHOICES, method="status_filter") excluded_id = CharFilter(method="filter_excluded_id") - full_name__startswith = CharFilter(field_name="full_name", lookup_expr="startswith") - full_name__endswith = CharFilter(field_name="full_name", lookup_expr="endswith") withdrawn = BooleanFilter(field_name="withdrawn") flags = MultipleChoiceFilter(choices=INDIVIDUAL_FLAGS_CHOICES, method="flags_filter") @@ -198,12 +187,7 @@ class Meta: "programs", "business_area", "full_name", - "full_name__startswith", - "full_name__endswith", "age", - "age__range", - "age__lte", - "age__gte", "sex", "household__admin_area", "withdrawn", diff --git a/backend/hct_mis_api/apps/mis_datahub/migrations/0037_migration.py b/backend/hct_mis_api/apps/mis_datahub/migrations/0037_migration.py new file mode 100644 index 0000000000..b1caa5d713 --- /dev/null +++ b/backend/hct_mis_api/apps/mis_datahub/migrations/0037_migration.py @@ -0,0 +1,63 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mis_datahub', '0036_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='document', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='downpayment', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='fundscommitment', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='household', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='individual', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='individualroleinhousehold', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='program', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='session', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='targetpopulation', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='targetpopulationentry', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/mis_datahub/tests/test_data_send_tp_to_datahub.py b/backend/hct_mis_api/apps/mis_datahub/tests/test_data_send_tp_to_datahub.py index 9c8acf150d..d08c8b0b3d 100644 --- a/backend/hct_mis_api/apps/mis_datahub/tests/test_data_send_tp_to_datahub.py +++ b/backend/hct_mis_api/apps/mis_datahub/tests/test_data_send_tp_to_datahub.py @@ -8,25 +8,23 @@ import hct_mis_api.apps.mis_datahub.models as dh_models from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.household.fixtures import 
( - HouseholdFactory, - IndividualFactory, -) +from hct_mis_api.apps.household.fixtures import HouseholdFactory, IndividualFactory from hct_mis_api.apps.household.models import ( ROLE_PRIMARY, + UNHCR, Agency, IndividualIdentity, IndividualRoleInHousehold, - UNHCR, ) from hct_mis_api.apps.mis_datahub.tasks.send_tp_to_datahub import SendTPToDatahubTask from hct_mis_api.apps.program.fixtures import ProgramFactory from hct_mis_api.apps.registration_data.fixtures import RegistrationDataImportFactory -from hct_mis_api.apps.targeting.models import TargetPopulation, HouseholdSelection +from hct_mis_api.apps.targeting.models import HouseholdSelection, TargetPopulation class TestDataSendTpToDatahub(TestCase): multi_db = True + databases = "__all__" @staticmethod def _pre_test_commands(): diff --git a/backend/hct_mis_api/apps/mis_datahub/tests/test_external_collector_send_tp_to_datahub.py b/backend/hct_mis_api/apps/mis_datahub/tests/test_external_collector_send_tp_to_datahub.py index c398193c72..7743361369 100644 --- a/backend/hct_mis_api/apps/mis_datahub/tests/test_external_collector_send_tp_to_datahub.py +++ b/backend/hct_mis_api/apps/mis_datahub/tests/test_external_collector_send_tp_to_datahub.py @@ -4,10 +4,7 @@ import hct_mis_api.apps.mis_datahub.models as dh_models from hct_mis_api.apps.core.fixtures import AdminAreaFactory, AdminAreaLevelFactory from hct_mis_api.apps.core.models import BusinessArea -from hct_mis_api.apps.household.fixtures import ( - HouseholdFactory, - IndividualFactory, -) +from hct_mis_api.apps.household.fixtures import HouseholdFactory, IndividualFactory from hct_mis_api.apps.household.models import ( ROLE_ALTERNATE, ROLE_PRIMARY, @@ -21,6 +18,7 @@ class TestExternalCollectorSendTpToDatahub(TestCase): multi_db = True + databases = "__all__" @staticmethod def _pre_test_commands(): diff --git a/backend/hct_mis_api/apps/payment/migrations/0031_migration.py b/backend/hct_mis_api/apps/payment/migrations/0031_migration.py new file mode 100644 index 0000000000..5369bed522 --- /dev/null +++ b/backend/hct_mis_api/apps/payment/migrations/0031_migration.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('payment', '0030_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='cashplanpaymentverification', + name='age_filter', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='cashplanpaymentverification', + name='excluded_admin_areas_filter', + field=models.JSONField(null=True), + ), + ] diff --git a/backend/hct_mis_api/apps/power_query/migrations/0003_migration.py b/backend/hct_mis_api/apps/power_query/migrations/0003_migration.py new file mode 100644 index 0000000000..22929f7b4c --- /dev/null +++ b/backend/hct_mis_api/apps/power_query/migrations/0003_migration.py @@ -0,0 +1,53 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('power_query', '0002_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='dataset', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='dataset', + name='info', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='formatter', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='query', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='query', + name='info', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='query', + name='query_args', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='report', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='report', + name='query_args', + field=models.JSONField(blank=True, default=dict), + ), + ] diff --git a/backend/hct_mis_api/apps/power_query/tests/test_views.py b/backend/hct_mis_api/apps/power_query/tests/test_views.py index 442f9223f9..97ba983271 100644 --- a/backend/hct_mis_api/apps/power_query/tests/test_views.py +++ b/backend/hct_mis_api/apps/power_query/tests/test_views.py @@ -72,7 +72,7 @@ def setUpTestData(cls): cls.USER_PASSWORD = "123" cls.formatter_json = FormatterFactory(name="Queryset To JSON") cls.user = UserFactory( - username="superuser-%s" % random.randint(1, 100), + username="superuser-{}".format(random.randint(1, 100)), is_superuser=True, is_staff=True, password=cls.USER_PASSWORD, diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py index 1d3bac6974..a009e7b857 100644 --- a/backend/hct_mis_api/apps/registration_data/schema.py +++ b/backend/hct_mis_api/apps/registration_data/schema.py @@ -26,11 +26,10 @@ class RegistrationDataImportFilter(FilterSet): import_date = DateFilter(field_name="import_date__date") business_area = CharFilter(field_name="business_area__slug") - name__startswith = CharFilter(field_name="name", lookup_expr="startswith") class Meta: model = RegistrationDataImport - fields = ["imported_by__id", "import_date", "status", "name", "name__startswith", "business_area"] + fields = ["imported_by__id", "import_date", "status", "name", "business_area"] order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/registration_datahub/migrations/0050_migration.py b/backend/hct_mis_api/apps/registration_datahub/migrations/0050_migration.py new file mode 100644 index 0000000000..02221cfb11 --- /dev/null +++ b/backend/hct_mis_api/apps/registration_datahub/migrations/0050_migration.py @@ -0,0 +1,48 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('registration_datahub', '0049_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='importedagency', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='importedhousehold', + name='flex_fields', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='importedindividual', + name='deduplication_batch_results', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='importedindividual', + name='deduplication_golden_record_results', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='importedindividual', + name='flex_fields', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='importedindividualidentity', + name='id', + 
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='koboimportedsubmission', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py b/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py new file mode 100644 index 0000000000..e3f3e45b27 --- /dev/null +++ b/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('steficon', '0013_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='rule', + name='flags', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='rule', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + migrations.AlterField( + model_name='rulecommit', + name='after', + field=models.JSONField(editable=False, help_text='The record after apply changes'), + ), + migrations.AlterField( + model_name='rulecommit', + name='before', + field=models.JSONField(editable=False, help_text='The record before change'), + ), + migrations.AlterField( + model_name='rulecommit', + name='id', + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ] diff --git a/backend/hct_mis_api/apps/targeting/migrations/0031_migration.py b/backend/hct_mis_api/apps/targeting/migrations/0031_migration.py new file mode 100644 index 0000000000..e6a2d3d944 --- /dev/null +++ b/backend/hct_mis_api/apps/targeting/migrations/0031_migration.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('targeting', '0030_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='targetingcriteriarulefilter', + name='arguments', + field=models.JSONField(help_text='\n Array of arguments\n '), + ), + migrations.AlterField( + model_name='targetingindividualblockrulefilter', + name='arguments', + field=models.JSONField(help_text='\n Array of arguments\n '), + ), + ] diff --git a/backend/hct_mis_api/settings/base.py b/backend/hct_mis_api/settings/base.py index 2f0de9d2e3..c713fc349c 100644 --- a/backend/hct_mis_api/settings/base.py +++ b/backend/hct_mis_api/settings/base.py @@ -25,7 +25,7 @@ # domains/hosts etc. DOMAIN_NAME = env("DOMAIN") -WWW_ROOT = "http://%s/" % DOMAIN_NAME +WWW_ROOT = "http://{}/".format(DOMAIN_NAME) ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[DOMAIN_NAME]) FRONTEND_HOST = env("HCT_MIS_FRONTEND_HOST", default=DOMAIN_NAME) ADMIN_PANEL_URL = env("ADMIN_PANEL_URL") diff --git a/backend/hct_mis_api/settings/dev.py b/backend/hct_mis_api/settings/dev.py index f54236bd43..b2ab46c3d6 100644 --- a/backend/hct_mis_api/settings/dev.py +++ b/backend/hct_mis_api/settings/dev.py @@ -9,7 +9,7 @@ # domains/hosts etc. 
DOMAIN_NAME = os.getenv("DOMAIN", "localhost:8000") -WWW_ROOT = "http://%s/" % DOMAIN_NAME +WWW_ROOT = "http://{}/".format(DOMAIN_NAME) ALLOWED_HOSTS.extend(["localhost", "127.0.0.1", "10.0.2.2"]) # other @@ -32,6 +32,4 @@ "test": {"hosts": "elasticsearch_test:9200"}, } -EMAIL_BACKEND = os.getenv( - "EMAIL_BACKEND", "django.core.mail.backends.console.EmailBackend" -) +EMAIL_BACKEND = os.getenv("EMAIL_BACKEND", "django.core.mail.backends.console.EmailBackend") diff --git a/backend/hct_mis_api/settings/staging.py b/backend/hct_mis_api/settings/staging.py index 92c153e12e..8450f4c83a 100644 --- a/backend/hct_mis_api/settings/staging.py +++ b/backend/hct_mis_api/settings/staging.py @@ -9,7 +9,7 @@ # domains/hosts etc. DOMAIN_NAME = os.getenv("DOMAIN", "dev-hct.unitst.org") -WWW_ROOT = "http://%s/" % DOMAIN_NAME +WWW_ROOT = "http://{}/".format(DOMAIN_NAME) # other # EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" @@ -20,24 +20,14 @@ AZURE_ACCOUNT_NAME = os.getenv("STORAGE_AZURE_ACCOUNT_NAME", "") AZURE_ACCOUNT_KEY = os.getenv("STORAGE_AZURE_ACCOUNT_KEY", "") -MEDIA_STORAGE_AZURE_ACCOUNT_NAME = os.getenv( - "MEDIA_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME -) -MEDIA_STORAGE_AZURE_ACCOUNT_KEY = os.getenv( - "MEDIA_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY -) -STATIC_STORAGE_AZURE_ACCOUNT_NAME = os.getenv( - "STATIC_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME -) -STATIC_STORAGE_AZURE_ACCOUNT_KEY = os.getenv( - "STATIC_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY -) +MEDIA_STORAGE_AZURE_ACCOUNT_NAME = os.getenv("MEDIA_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME) +MEDIA_STORAGE_AZURE_ACCOUNT_KEY = os.getenv("MEDIA_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY) +STATIC_STORAGE_AZURE_ACCOUNT_NAME = os.getenv("STATIC_STORAGE_AZURE_ACCOUNT_NAME", AZURE_ACCOUNT_NAME) +STATIC_STORAGE_AZURE_ACCOUNT_KEY = os.getenv("STATIC_STORAGE_AZURE_ACCOUNT_KEY", AZURE_ACCOUNT_KEY) AZURE_URL_EXPIRATION_SECS = 10800 -AZURE_STATIC_CUSTOM_DOMAIN = ( - f"{STATIC_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" -) +AZURE_STATIC_CUSTOM_DOMAIN = f"{STATIC_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" AZURE_MEDIA_CUSTOM_DOMAIN = f"{MEDIA_STORAGE_AZURE_ACCOUNT_NAME}.blob.core.windows.net" STATIC_URL = f"https://{AZURE_STATIC_CUSTOM_DOMAIN}/{STATIC_LOCATION}/" MEDIA_URL = f"https://{AZURE_MEDIA_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/" diff --git a/backend/hct_mis_api/settings/test.py b/backend/hct_mis_api/settings/test.py index 64b6a80f57..5d6b1bb856 100644 --- a/backend/hct_mis_api/settings/test.py +++ b/backend/hct_mis_api/settings/test.py @@ -9,7 +9,7 @@ # domains/hosts etc. 
DOMAIN_NAME = "localhost:8000" -WWW_ROOT = "http://%s/" % DOMAIN_NAME +WWW_ROOT = "http://{}/".format(DOMAIN_NAME) ALLOWED_HOSTS = ["localhost", "127.0.0.1", "10.0.2.2", os.getenv("DOMAIN", "")] # other @@ -26,9 +26,7 @@ "version": 1, "disable_existing_loggers": False, "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s" - }, + "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s line %(lineno)d: %(message)s"}, "verbose": { "format": "[%(asctime)s][%(levelname)s][%(name)s] %(filename)s.%(funcName)s:%(lineno)d %(message)s", }, From 9aba0618fa7122dea7edb64d7f0a19cca5ceb8b1 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Mon, 14 Feb 2022 05:55:53 +0000 Subject: [PATCH 11/24] Added python3-dev for poetry to install django-compressor --- backend/Dockerfile | 1 + .../hct_mis_api/apps/steficon/migrations/0014_migration.py | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index d4a733bfe9..85952577f1 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -3,6 +3,7 @@ FROM python:3.9.1-slim-buster RUN apt-get update RUN apt-get install -y --no-install-recommends \ + python3-dev \ build-essential \ libjpeg-dev \ zlib1g-dev \ diff --git a/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py b/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py index e3f3e45b27..f30dab0baf 100644 --- a/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py +++ b/backend/hct_mis_api/apps/steficon/migrations/0014_migration.py @@ -15,11 +15,6 @@ class Migration(migrations.Migration): name='flags', field=models.JSONField(blank=True, default=dict), ), - migrations.AlterField( - model_name='rule', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), - ), migrations.AlterField( model_name='rulecommit', name='after', From bba05292456e029ea582c9b343ab20d9586ea69f Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Mon, 14 Feb 2022 15:55:23 +0000 Subject: [PATCH 12/24] Added tox and coverage --- backend/.coveragerc | 6 ++ backend/Dockerfile | 1 - backend/poetry.lock | 213 +++++++++++++++++++++++++++++++++++++---- backend/pyproject.toml | 6 +- backend/tox.ini | 27 ++++++ docker-compose.yml | 1 + 6 files changed, 235 insertions(+), 19 deletions(-) create mode 100644 backend/.coveragerc create mode 100644 backend/tox.ini diff --git a/backend/.coveragerc b/backend/.coveragerc new file mode 100644 index 0000000000..7bb6d12341 --- /dev/null +++ b/backend/.coveragerc @@ -0,0 +1,6 @@ +[run] +omit = + # omit all the migrations + *migrations* + # omit all the snapshots + *snapshots* \ No newline at end of file diff --git a/backend/Dockerfile b/backend/Dockerfile index 85952577f1..d4a733bfe9 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -3,7 +3,6 @@ FROM python:3.9.1-slim-buster RUN apt-get update RUN apt-get install -y --no-install-recommends \ - python3-dev \ build-essential \ libjpeg-dev \ zlib1g-dev \ diff --git a/backend/poetry.lock b/backend/poetry.lock index c405204880..64b274f6d7 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -237,7 +237,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.11" +version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main" optional = false @@ -303,6 +303,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "coverage" +version = "6.3.1" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "36.0.1" @@ -346,6 +357,14 @@ category = "main" optional = false python-versions = ">=2.7" +[[package]] +name = "distlib" +version = "0.3.4" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "django" version = "3.2" @@ -444,6 +463,7 @@ python-dateutil = "2.6.0" [package.source] type = "url" url = "https://github.com/Tivix/django-auditlog/archive/refs/heads/master.zip" + [[package]] name = "django-celery-beat" version = "2.2.1" @@ -603,11 +623,17 @@ tablib = {version = ">=3.0.0", extras = ["html", "ods", "xls", "xlsx", "yaml"]} [[package]] name = "django-js-asset" -version = "1.2.2" +version = "2.0.0" description = "script tag with additional attributes for django.forms.Media" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" + +[package.extras] +tests = ["coverage"] [[package]] name = "django-jsoneditor" @@ -711,14 +737,14 @@ full = ["django-adminfilters (>=1.7.1)", "django-admin-extra-urls (>=3.5.1)", "d [[package]] name = "django-sql-explorer" -version = "2.3" +version = "2.4" description = "A pluggable app that allows users (admins) to execute SQL, view, and export the results." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -Django = ">=2.2.14" +Django = ">=2.2.27" sqlparse = ">=0.4.0" xlsxwriter = {version = ">=1.2.1", optional = true, markers = "extra == \"xls\""} @@ -869,6 +895,18 @@ python-versions = "*" [package.dependencies] wasmer = {version = ">=0.3.0", markers = "python_version >= \"3.5\" and platform_machine == \"x86_64\" and sys_platform == \"darwin\" or python_version >= \"3.5\" and platform_machine == \"x86_64\" and sys_platform == \"linux\""} +[[package]] +name = "filelock" +version = "3.4.2" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] + [[package]] name = "freezegun" version = "0.3.14" @@ -1289,6 +1327,30 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "platformdirs" +version = "2.5.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "promise" version = "2.3" @@ -1352,6 +1414,14 @@ python-versions = "*" [package.extras] tests = ["pytest"] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "pycountry" version = "20.7.3" @@ -1778,6 +1848,28 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tox" +version = "3.24.5" +description = "tox is a generic virtualenv management and test command line tool" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} +filelock = ">=3.0.0" +packaging = ">=14" +pluggy = ">=0.12.0" +py = ">=1.4.17" +six = ">=1.14.0" +toml = ">=0.9.4" +virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" + +[package.extras] +docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] + [[package]] name = "traitlets" version = "5.1.1" @@ -1799,7 +1891,7 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.0.1" +version = "4.1.1" description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false @@ -1834,6 +1926,24 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "virtualenv" +version = "20.13.1" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +platformdirs = ">=2,<3" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] + [[package]] name = "waitress" version = "2.0.0" @@ -1953,8 +2063,8 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" -python-versions = "3.9.1" -content-hash = "f2961476c0719f7719a405de9b78ac15f9a20756b8524d976e3f38c4143ce76a" +python-versions = "3.9.10" +content-hash = "2354b471062a32ffb4d2892c05dc0e537c4974bd492e53c7e424a8c527a89b10" [metadata.files] amqp 
= [ @@ -2077,8 +2187,8 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, - {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, @@ -2100,6 +2210,49 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +coverage = [ + {file = "coverage-6.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525"}, + {file = "coverage-6.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e"}, + {file = "coverage-6.3.1-cp310-cp310-win32.whl", hash = "sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217"}, + {file = "coverage-6.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb"}, + {file = "coverage-6.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8"}, + {file = "coverage-6.3.1-cp37-cp37m-win32.whl", hash = "sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0"}, + {file = "coverage-6.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687"}, + {file = "coverage-6.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320"}, + {file = "coverage-6.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a"}, + {file = "coverage-6.3.1-cp38-cp38-win32.whl", hash = "sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10"}, + {file = "coverage-6.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f"}, + {file = "coverage-6.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d"}, + {file = "coverage-6.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38"}, + {file = "coverage-6.3.1-cp39-cp39-win32.whl", hash 
= "sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2"}, + {file = "coverage-6.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa"}, + {file = "coverage-6.3.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2"}, + {file = "coverage-6.3.1.tar.gz", hash = "sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8"}, +] cryptography = [ {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, @@ -2134,6 +2287,10 @@ diff-match-patch = [ {file = "diff-match-patch-20200713.tar.gz", hash = "sha256:da6f5a01aa586df23dfc89f3827e1cafbb5420be9d87769eeb079ddfd9477a18"}, {file = "diff_match_patch-20200713-py3-none-any.whl", hash = "sha256:8bf9d9c4e059d917b5c6312bac0c137971a32815ddbda9c682b949f2986b4d34"}, ] +distlib = [ + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, +] django = [ {file = "Django-3.2-py3-none-any.whl", hash = "sha256:0604e84c4fb698a5e53e5857b5aea945b2f19a18f25f10b8748dbdf935788927"}, {file = "Django-3.2.tar.gz", hash = "sha256:21f0f9643722675976004eb683c55d33c05486f94506672df3d6a141546f389d"}, @@ -2209,8 +2366,8 @@ django-import-export = [ {file = "django_import_export-2.7.1-py3-none-any.whl", hash = "sha256:254ca359782efca932c398edabc15dd51d31da241e85cc03af5b720173e0b2fe"}, ] django-js-asset = [ - {file = "django-js-asset-1.2.2.tar.gz", hash = "sha256:c163ae80d2e0b22d8fb598047cd0dcef31f81830e127cfecae278ad574167260"}, - {file = "django_js_asset-1.2.2-py2.py3-none-any.whl", hash = "sha256:8ec12017f26eec524cab436c64ae73033368a372970af4cf42d9354fcb166bdd"}, + {file = "django_js_asset-2.0.0-py3-none-any.whl", hash = "sha256:86f9f300d682537ddaf0487dc2ab356581b8f50c069bdba91d334a46e449f923"}, + {file = "django_js_asset-2.0.0.tar.gz", hash = "sha256:adc1ee1efa853fad42054b540c02205344bb406c9bddf87c9e5377a41b7db90f"}, ] django-jsoneditor = [ {file = "django-jsoneditor-0.2.2.tar.gz", hash = "sha256:c4d9de033840e0b736ac1c4fcc56298a4a0470402db8aaf35d30567127d8397c"}, @@ -2244,8 +2401,8 @@ django-smart-admin = [ {file = "django-smart-admin-1.6.0.tar.gz", hash = "sha256:cbd9b0543453cdfac9e3da8fa5cdd9d4ddf9ac4db966d814f9dac04bd4ce1847"}, ] django-sql-explorer = [ - {file = "django-sql-explorer-2.3.tar.gz", hash = "sha256:031788a88938102c454679b0d97a41d0e177280fd4c39aebf73b157ece62c960"}, - {file = "django_sql_explorer-2.3-py3-none-any.whl", hash = "sha256:4fc032c8cb9abad491703854a5ff5ae4576b4aacf63100aa0356d301432d8373"}, + {file = "django-sql-explorer-2.4.tar.gz", hash = "sha256:6f27c8842600f19704ac20bef52d837a25da11c090cd290ee9ff40f79e48da30"}, + {file = "django_sql_explorer-2.4-py3-none-any.whl", hash = "sha256:2b20499a21e0b1faced83eb63f9962dcaea644429092d645583e2f2ea3983cdb"}, ] django-storages = [ {file = "django-storages-1.8.tar.gz", hash = "sha256:9322ab74ba6371e2e0fccc350c741686ade829e43085597b26b07ae8955a0a00"}, @@ -2290,6 +2447,10 @@ faker = [ fastdiff = [ {file = "fastdiff-0.2.0.tar.gz", hash = "sha256:623ad3d9055ab78e014d0d10767cb033d98d5d4f66052abf498350c8e42e29aa"}, ] +filelock = [ + {file = 
"filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, + {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, +] freezegun = [ {file = "freezegun-0.3.14-py2.py3-none-any.whl", hash = "sha256:10336fc80a235847c64033f9727f3847f37db4bd549be1d9f3b5ae0279256c69"}, {file = "freezegun-0.3.14.tar.gz", hash = "sha256:6262de2f4bab671f7189bb8a0b9d8751da69a53f0b9813fb8f412681662d872a"}, @@ -2540,6 +2701,14 @@ pillow = [ {file = "Pillow-7.0.0-pp373-pypy36_pp73-win32.whl", hash = "sha256:8453f914f4e5a3d828281a6628cf517832abfa13ff50679a4848926dac7c0358"}, {file = "Pillow-7.0.0.tar.gz", hash = "sha256:4d9ed9a64095e031435af120d3c910148067087541131e82b3e8db302f4c8946"}, ] +platformdirs = [ + {file = "platformdirs-2.5.0-py3-none-any.whl", hash = "sha256:30671902352e97b1eafd74ade8e4a694782bd3471685e78c32d0fdfd3aa7e7bb"}, + {file = "platformdirs-2.5.0.tar.gz", hash = "sha256:8ec11dfba28ecc0715eb5fb0147a87b1bf325f349f3da9aab2cd6b50b96b692b"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] promise = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] @@ -2604,6 +2773,10 @@ pure-eval = [ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, ] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] pycountry = [ {file = "pycountry-20.7.3.tar.gz", hash = "sha256:81084a53d3454344c0292deebc20fcd0a1488c136d4900312cbd465cf552cb42"}, ] @@ -2937,6 +3110,10 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tox = [ + {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, + {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, +] traitlets = [ {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, @@ -2968,8 +3145,8 @@ typed-ast = [ {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = 
"sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] unittest-xml-reporting = [ {file = "unittest-xml-reporting-3.0.4.tar.gz", hash = "sha256:984cebba69e889401bfe3adb9088ca376b3a1f923f0590d005126c1bffd1a695"}, @@ -2983,6 +3160,10 @@ vine = [ {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] +virtualenv = [ + {file = "virtualenv-20.13.1-py2.py3-none-any.whl", hash = "sha256:45e1d053cad4cd453181ae877c4ffc053546ae99e7dd049b9ff1d9be7491abf7"}, + {file = "virtualenv-20.13.1.tar.gz", hash = "sha256:e0621bcbf4160e4e1030f05065c8834b4e93f4fcc223255db2a823440aca9c14"}, +] waitress = [ {file = "waitress-2.0.0-py3-none-any.whl", hash = "sha256:29af5a53e9fb4e158f525367678b50053808ca6c21ba585754c77d790008c746"}, {file = "waitress-2.0.0.tar.gz", hash = "sha256:69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 61e1175eaf..12e3303c9d 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -45,13 +45,13 @@ include_trailing_comma = true skip = "migrations" [tool.poetry] -name = "HCT MIS Backend" +name = "hct_mis_api" version = "2022.1.1" description = "HCT MIS is UNICEF's humanitarian cash transfer platform." authors = ["Tivix"] [tool.poetry.dependencies] -python = "3.9.1" +python = "3.9.10" Django = "3.2" Jinja2 = "2.11.2" #name = "HCT MIS Backend" @@ -131,6 +131,8 @@ watchdog = "^2.0.2" wasmer = "0.4.1" parso = "0.7.1" argh = "^0.26.2" +tox = "^3.24.5" +coverage = "^6.3.1" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/backend/tox.ini b/backend/tox.ini new file mode 100644 index 0000000000..1d175fac89 --- /dev/null +++ b/backend/tox.ini @@ -0,0 +1,27 @@ +[tox] +envlist = py39-django{32,40} +isolated_build = true + +[testenv] +passenv = * +deps = + django32: Django==3.2 + django40: Django==4.0 + poetry +#skipsdist = True + +setenv = + DJANGO_SETTINGS_MODULE=hct_mis_api.settings.test + #PYTHONPATH={toxinidir}{:}{toxinidir}/.. + +commands = + poetry install + poetry run ./manage.py test --settings hct_mis_api.settings.test + + +[tox:.package] +whitelist_externals = poetry +# note tox will use the same python version as under what tox is installed to package +# so unless this is python 3 you can require a given python version for the packaging +# environment via the basepython key +basepython = python3.9 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index addfc56103..5b37a71493 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -164,6 +164,7 @@ services: dockerfile: Dockerfile environment: - node.name=es02 + - discovery.type=single-node - cluster.name=es-docker-cluster-test - cluster.initial_master_nodes=es02 - bootstrap.memory_lock=true From daaabc757bed185fee9cad41b5c419e063c19c4c Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Tue, 15 Feb 2022 07:39:31 +0000 Subject: [PATCH 13/24] Downgrade to Python 3.9.1 --- backend/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 12e3303c9d..4640df5a80 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -51,7 +51,7 @@ description = "HCT MIS is UNICEF's humanitarian cash transfer platform." 
authors = ["Tivix"] [tool.poetry.dependencies] -python = "3.9.10" +python = "3.9.1" Django = "3.2" Jinja2 = "2.11.2" #name = "HCT MIS Backend" From 9d00412dc9e7cf03c7f46f336d660d2b3e13cb5d Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Tue, 15 Feb 2022 10:03:48 +0000 Subject: [PATCH 14/24] Updated authorization headers --- backend/hct_mis_api/apps/power_query/tests/test_views.py | 6 +++--- backend/hct_mis_api/apps/power_query/utils.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/hct_mis_api/apps/power_query/tests/test_views.py b/backend/hct_mis_api/apps/power_query/tests/test_views.py index 97ba983271..efac04b086 100644 --- a/backend/hct_mis_api/apps/power_query/tests/test_views.py +++ b/backend/hct_mis_api/apps/power_query/tests/test_views.py @@ -88,7 +88,7 @@ def test_pending_fetch(self): self.assertEqual(response.status_code, 401) username, password = self.report1.owner.username, self.USER_PASSWORD - headers = {"HTTP_AUTHORIZATION": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii")} + headers = {"Authorization": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii")} response = self.client.get(url, **headers) self.assertEqual(response.status_code, 400) self.assertContains(response, b"This report is not currently available", status_code=400) @@ -98,7 +98,7 @@ def test_valid_fetch(self): username, password = self.report2.owner.username, self.USER_PASSWORD assert password == "123", password headers = { - "HTTP_AUTHORIZATION": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii"), + "Authorization": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii"), } response = self.client.get(url, **headers) self.assertEqual(response.status_code, 200) @@ -121,7 +121,7 @@ def setUpTestData(cls): cls.USER_PASSWORD = "123" cls.formatter_json = FormatterFactory(name="Queryset To JSON") cls.user = UserFactory( - username="superuser-%s" % random.randint(1, 100), + username="superuser-{}".format(random.randint(1, 100)), is_superuser=True, is_staff=True, password=cls.USER_PASSWORD, diff --git a/backend/hct_mis_api/apps/power_query/utils.py b/backend/hct_mis_api/apps/power_query/utils.py index 098dcdc7b6..589d12c76f 100644 --- a/backend/hct_mis_api/apps/power_query/utils.py +++ b/backend/hct_mis_api/apps/power_query/utils.py @@ -59,7 +59,7 @@ def wrap(request, *args, **kwargs): if request.user.is_authenticated: return view(request, *args, **kwargs) - if "HTTP_AUTHORIZATION" in request.META: + if "Authorization" in request.META: auth = request.headers["Authorization"].split() if len(auth) == 2: if auth[0].lower() == "basic": From 086e94035377c5fe68bedea4de287e148e98573a Mon Sep 17 00:00:00 2001 From: Jan Romaniak <jan.romaniak@tivix.com> Date: Wed, 16 Feb 2022 10:53:08 +0100 Subject: [PATCH 15/24] fix merge migrations --- .../grievance/migrations/0035_migration.py | 48 +-------------- .../grievance/migrations/0036_migration.py | 58 +++++++++++++++++++ 2 files changed, 61 insertions(+), 45 deletions(-) create mode 100644 backend/hct_mis_api/apps/grievance/migrations/0036_migration.py diff --git a/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py b/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py index d6483109c5..3b3ab4ed91 100644 --- a/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py +++ b/backend/hct_mis_api/apps/grievance/migrations/0035_migration.py @@ -1,3 +1,5 @@ +# Generated by Django 
2.2.26 on 2022-02-09 16:23 + import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion @@ -13,50 +15,6 @@ class Migration(migrations.Migration): ] operations = [ - migrations.AlterField( - model_name='grievanceticket', - name='extras', - field=models.JSONField(blank=True, default=dict), - ), - migrations.AlterField( - model_name='ticketaddindividualdetails', - name='individual_data', - field=models.JSONField(null=True), - ), - migrations.AlterField( - model_name='ticketdeleteindividualdetails', - name='role_reassign_data', - field=models.JSONField(default=dict), - ), - migrations.AlterField( - model_name='tickethouseholddataupdatedetails', - name='household_data', - field=models.JSONField(null=True), - ), - migrations.AlterField( - model_name='ticketindividualdataupdatedetails', - name='individual_data', - field=models.JSONField(null=True), - ), - migrations.AlterField( - model_name='ticketindividualdataupdatedetails', - name='role_reassign_data', - field=models.JSONField(default=dict), - ), - migrations.AlterField( - model_name='ticketneedsadjudicationdetails', - name='extra_data', - field=models.JSONField(default=dict), - ), - migrations.AlterField( - model_name='ticketneedsadjudicationdetails', - name='role_reassign_data', - field=models.JSONField(default=dict), - ), - migrations.AlterField( - model_name='ticketsystemflaggingdetails', - name='role_reassign_data', - field=models.JSONField(default=dict), migrations.CreateModel( name='TicketDeleteHouseholdDetails', fields=[ @@ -72,4 +30,4 @@ class Migration(migrations.Migration): 'abstract': False, }, ), - ] + ] \ No newline at end of file diff --git a/backend/hct_mis_api/apps/grievance/migrations/0036_migration.py b/backend/hct_mis_api/apps/grievance/migrations/0036_migration.py new file mode 100644 index 0000000000..4228d6b472 --- /dev/null +++ b/backend/hct_mis_api/apps/grievance/migrations/0036_migration.py @@ -0,0 +1,58 @@ +# Generated by Django 3.2 on 2022-02-11 19:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('grievance', '0035_migration'), + ] + + operations = [ + migrations.AlterField( + model_name='grievanceticket', + name='extras', + field=models.JSONField(blank=True, default=dict), + ), + migrations.AlterField( + model_name='ticketaddindividualdetails', + name='individual_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketdeleteindividualdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='tickethouseholddataupdatedetails', + name='household_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketindividualdataupdatedetails', + name='individual_data', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='ticketindividualdataupdatedetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketneedsadjudicationdetails', + name='extra_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketneedsadjudicationdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + migrations.AlterField( + model_name='ticketsystemflaggingdetails', + name='role_reassign_data', + field=models.JSONField(default=dict), + ), + ] \ No newline at end of file From ae284a822e875d3fddfa14c2dcaf7a2045ad202c Mon Sep 17 00:00:00 
2001 From: Jan Romaniak <jan.romaniak@tivix.com> Date: Wed, 16 Feb 2022 16:29:35 +0100 Subject: [PATCH 16/24] fix tests --- backend/hct_mis_api/apps/household/schema.py | 2 +- .../snapshots/snap_test_household_query.py | 155 +++++++++--------- backend/hct_mis_api/apps/payment/schema.py | 2 +- ...chart_total_transferred_cash_by_country.py | 1 + .../apps/power_query/tests/test_views.py | 4 +- backend/hct_mis_api/apps/power_query/utils.py | 2 +- backend/hct_mis_api/apps/program/schema.py | 3 + .../snapshots/snap_test_cash_plan_queries.py | 1 + ...nap_test_registration_data_import_query.py | 1 + backend/hct_mis_api/urls.py | 4 +- docker-compose.yml | 1 - 11 files changed, 91 insertions(+), 85 deletions(-) diff --git a/backend/hct_mis_api/apps/household/schema.py b/backend/hct_mis_api/apps/household/schema.py index f13dca633e..e32a92013c 100644 --- a/backend/hct_mis_api/apps/household/schema.py +++ b/backend/hct_mis_api/apps/household/schema.py @@ -1,6 +1,6 @@ import re -from django.db.models import IntegerField, Prefetch, Q, Sum +from django.db.models import IntegerField, Prefetch, Q, Sum, DecimalField from django.db.models.functions import Coalesce, Lower import graphene diff --git a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py index 1cb717e459..541ee6f666 100644 --- a/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py +++ b/backend/hct_mis_api/apps/household/tests/snapshots/snap_test_household_query.py @@ -4,8 +4,86 @@ from snapshottest import Snapshot + snapshots = Snapshot() +snapshots['TestHouseholdQuery::test_household_filter_by_programme_0_with_permission 1'] = { + 'data': { + 'allHouseholds': { + 'edges': [ + { + 'node': { + 'address': 'Lorem Ipsum', + 'countryOrigin': 'Poland', + 'programs': { + 'edges': [ + { + 'node': { + 'name': 'Test program ONE' + } + } + ] + }, + 'size': 4 + } + }, + { + 'node': { + 'address': 'Lorem Ipsum', + 'countryOrigin': 'Poland', + 'programs': { + 'edges': [ + { + 'node': { + 'name': 'Test program ONE' + } + } + ] + }, + 'size': 1 + } + }, + { + 'node': { + 'address': 'Lorem Ipsum', + 'countryOrigin': 'Poland', + 'programs': { + 'edges': [ + { + 'node': { + 'name': 'Test program ONE' + } + } + ] + }, + 'size': 11 + } + } + ] + } + } +} + +snapshots['TestHouseholdQuery::test_household_filter_by_programme_1_without_permission 1'] = { + 'data': { + 'allHouseholds': None + }, + 'errors': [ + { + 'locations': [ + { + 'column': 7, + 'line': 3 + } + ], + 'message': 'Permission Denied', + 'path': [ + 'allHouseholds' + ] + } + ] +} + snapshots['TestHouseholdQuery::test_household_query_all_0_all_with_permission 1'] = { 'data': { 'allHouseholds': { @@ -222,83 +300,6 @@ } } -snapshots['TestHouseholdQuery::test_household_filter_by_programme_0_with_permission 1'] = { - 'data': { - 'allHouseholds': { - 'edges': [ - { - 'node': { - 'address': 'Lorem Ipsum', - 'countryOrigin': 'Poland', - 'programs': { - 'edges': [ - { - 'node': { - 'name': 'Test program ONE' - } - } - ] - }, - 'size': 4 - } - }, - { - 'node': { - 'address': 'Lorem Ipsum', - 'countryOrigin': 'Poland', - 'programs': { - 'edges': [ - { - 'node': { - 'name': 'Test program ONE' - } - } - ] - }, - 'size': 1 - } - }, - { - 'node': { - 'address': 'Lorem Ipsum', - 'countryOrigin': 'Poland', - 'programs': { - 'edges': [ - { - 'node': { - 'name': 'Test program ONE' - } - } - ] - }, - 'size': 11 - } - } - ] - } - } -} - 
-snapshots['TestHouseholdQuery::test_household_filter_by_programme_1_without_permission 1'] = { - 'data': { - 'allHouseholds': None - }, - 'errors': [ - { - 'locations': [ - { - 'column': 7, - 'line': 3 - } - ], - 'message': 'Permission Denied', - 'path': [ - 'allHouseholds' - ] - } - ] -} - snapshots['TestHouseholdQuery::test_household_query_single_0_with_permission 1'] = { 'data': { 'household': { diff --git a/backend/hct_mis_api/apps/payment/schema.py b/backend/hct_mis_api/apps/payment/schema.py index d08b0cd4ac..a36ef7edcc 100644 --- a/backend/hct_mis_api/apps/payment/schema.py +++ b/backend/hct_mis_api/apps/payment/schema.py @@ -1,4 +1,4 @@ -from django.db.models import Count, Q, Sum +from django.db.models import Count, Q, Sum, DecimalField from django.db.models.functions import Lower from django.shortcuts import get_object_or_404 diff --git a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py index 9ba57d067d..70f862e5c5 100644 --- a/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py +++ b/backend/hct_mis_api/apps/payment/tests/snapshots/snap_test_chart_total_transferred_cash_by_country.py @@ -4,6 +4,7 @@ from snapshottest import Snapshot + snapshots = Snapshot() snapshots['TestChartTotalTransferredCashByCountry::test_resolving_chart_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/power_query/tests/test_views.py b/backend/hct_mis_api/apps/power_query/tests/test_views.py index efac04b086..0400654674 100644 --- a/backend/hct_mis_api/apps/power_query/tests/test_views.py +++ b/backend/hct_mis_api/apps/power_query/tests/test_views.py @@ -88,7 +88,7 @@ def test_pending_fetch(self): self.assertEqual(response.status_code, 401) username, password = self.report1.owner.username, self.USER_PASSWORD - headers = {"Authorization": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii")} + headers = {"HTTP_AUTHORIZATION": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii")} response = self.client.get(url, **headers) self.assertEqual(response.status_code, 400) self.assertContains(response, b"This report is not currently available", status_code=400) @@ -98,7 +98,7 @@ def test_valid_fetch(self): username, password = self.report2.owner.username, self.USER_PASSWORD assert password == "123", password headers = { - "Authorization": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii"), + "HTTP_AUTHORIZATION": "Basic " + base64.b64encode(f"{username}:{password}".encode()).decode("ascii"), } response = self.client.get(url, **headers) self.assertEqual(response.status_code, 200) diff --git a/backend/hct_mis_api/apps/power_query/utils.py b/backend/hct_mis_api/apps/power_query/utils.py index 589d12c76f..098dcdc7b6 100644 --- a/backend/hct_mis_api/apps/power_query/utils.py +++ b/backend/hct_mis_api/apps/power_query/utils.py @@ -59,7 +59,7 @@ def wrap(request, *args, **kwargs): if request.user.is_authenticated: return view(request, *args, **kwargs) - if "Authorization" in request.META: + if "HTTP_AUTHORIZATION" in request.META: auth = request.headers["Authorization"].split() if len(auth) == 2: if auth[0].lower() == "basic": diff --git a/backend/hct_mis_api/apps/program/schema.py b/backend/hct_mis_api/apps/program/schema.py index 6931a346a5..48203a0f8c 100644 --- a/backend/hct_mis_api/apps/program/schema.py +++ 
b/backend/hct_mis_api/apps/program/schema.py @@ -202,6 +202,9 @@ class CashPlanNode(BaseNodePermissionMixin, DjangoObjectType): delivery_type = graphene.String() total_number_of_households = graphene.Int() currency = graphene.String(source="currency") + total_delivered_quantity = graphene.Float() + total_entitled_quantity = graphene.Float() + total_undelivered_quantity = graphene.Float() class Meta: model = CashPlan diff --git a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py index 90a20b6eec..6586924512 100644 --- a/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py +++ b/backend/hct_mis_api/apps/program/tests/snapshots/snap_test_cash_plan_queries.py @@ -4,6 +4,7 @@ from snapshottest import Snapshot + snapshots = Snapshot() snapshots['TestCashPlanQueries::test_cash_plans_0_all_with_permission 1'] = { diff --git a/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py b/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py index ad72a5464a..dca318bcc6 100644 --- a/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py +++ b/backend/hct_mis_api/apps/registration_data/tests/snapshots/snap_test_registration_data_import_query.py @@ -4,6 +4,7 @@ from snapshottest import Snapshot + snapshots = Snapshot() snapshots['TestRegistrationDataImportQuery::test_registration_data_import_datahub_query_all_0_with_permission 1'] = { diff --git a/backend/hct_mis_api/urls.py b/backend/hct_mis_api/urls.py index fad02994f3..327eade9f7 100644 --- a/backend/hct_mis_api/urls.py +++ b/backend/hct_mis_api/urls.py @@ -26,7 +26,6 @@ actions.add_to_site(site, exclude=["export_delete_tree"]) urlpatterns = [ - path(f"api/{settings.ADMIN_PANEL_URL}/", admin.site.urls), path("api/explorer/", include("explorer.urls")), path(f"api/{settings.ADMIN_PANEL_URL}/hijack/", include("hijack.urls")), path(f"api/{settings.ADMIN_PANEL_URL}/adminactions/", include("adminactions.urls")), @@ -63,7 +62,7 @@ hct_mis_api.apps.sanction_list.views.download_sanction_template, ), path( - "api/unicorn/download-target-population-xlsx/<uuid:target_population_id>/", + f"api/{settings.ADMIN_PANEL_URL}/download-target-population-xlsx/<uuid:target_population_id>/", hct_mis_api.apps.targeting.views.download_xlsx_households, name="admin-download-target-population", ), @@ -72,6 +71,7 @@ hct_mis_api.apps.core.views.download_dashboard_report, name="dashboard_report", ), + path(f"api/{settings.ADMIN_PANEL_URL}/", admin.site.urls), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += staticfiles_urlpatterns() diff --git a/docker-compose.yml b/docker-compose.yml index 5b37a71493..addfc56103 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -164,7 +164,6 @@ services: dockerfile: Dockerfile environment: - node.name=es02 - - discovery.type=single-node - cluster.name=es-docker-cluster-test - cluster.initial_master_nodes=es02 - bootstrap.memory_lock=true From f0a78b07ae98b5de869072b86f742a5dfc5c5ea9 Mon Sep 17 00:00:00 2001 From: Jan Romaniak <jan.romaniak@tivix.com> Date: Wed, 16 Feb 2022 16:32:52 +0100 Subject: [PATCH 17/24] fix formating --- backend/.flake8 | 5 - backend/hct_mis_api/apps/account/admin.py | 185 ++++-------------- .../hct_mis_api/apps/activity_log/models.py | 12 +- .../hct_mis_api/apps/core/base_test_case.py | 12 +- 
backend/hct_mis_api/apps/core/countries.py | 1 - backend/hct_mis_api/apps/core/datamart/api.py | 28 +-- .../apps/core/exchange_rates/models.py | 29 +-- .../apps/core/flex_fields_importer.py | 105 +++------- backend/hct_mis_api/apps/core/kobo/common.py | 19 +- .../apps/core/management/commands/upgrade.py | 26 ++- .../hct_mis_api/apps/core/mis_test_runner.py | 30 +-- backend/hct_mis_api/apps/geo/models.py | 4 +- .../hct_mis_api/apps/grievance/mutations.py | 158 ++++----------- backend/hct_mis_api/apps/household/forms.py | 12 +- .../apps/mis_datahub/celery_tasks.py | 1 + backend/hct_mis_api/apps/payment/utils.py | 51 ++--- .../hct_mis_api/apps/power_query/models.py | 20 +- .../apps/registration_datahub/celery_tasks.py | 3 +- .../apps/registration_datahub/mutations.py | 3 +- .../template_generator.py | 4 +- .../tests/test_kobo_validators_methods.py | 3 - backend/hct_mis_api/apps/reporting/models.py | 28 +-- .../apps/sanction_list/tasks/load_xml.py | 128 +++--------- backend/hct_mis_api/apps/steficon/forms.py | 1 - backend/hct_mis_api/apps/steficon/models.py | 36 +--- backend/hct_mis_api/apps/targeting/models.py | 126 ++++-------- .../hct_mis_api/apps/targeting/steficon.py | 1 - backend/hct_mis_api/apps/utils/admin.py | 4 +- 28 files changed, 271 insertions(+), 764 deletions(-) delete mode 100644 backend/.flake8 diff --git a/backend/.flake8 b/backend/.flake8 deleted file mode 100644 index 6190681ca8..0000000000 --- a/backend/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -max-line-length = 120 -select = C,E,F,W,B,B950 -ignore = E203, E501, W503 -exclude = *migrations/*,__init__.py,*settings/*,*/snapshots/* diff --git a/backend/hct_mis_api/apps/account/admin.py b/backend/hct_mis_api/apps/account/admin.py index 6735b12879..01a982cdf2 100644 --- a/backend/hct_mis_api/apps/account/admin.py +++ b/backend/hct_mis_api/apps/account/admin.py @@ -86,9 +86,7 @@ def clean(self): user = self.cleaned_data["user"] business_area = self.cleaned_data["business_area"] - account_models.IncompatibleRoles.objects.validate_user_role( - user, business_area, role - ) + account_models.IncompatibleRoles.objects.validate_user_role(user, business_area, role) class UserRoleInlineFormSet(BaseInlineFormSet): @@ -111,13 +109,9 @@ def clean(self): business_area = form.cleaned_data["business_area"] role = form.cleaned_data["role"] incompatible_roles = list( - account_models.IncompatibleRoles.objects.filter( - role_one=role - ).values_list("role_two", flat=True) + account_models.IncompatibleRoles.objects.filter(role_one=role).values_list("role_two", flat=True) ) + list( - account_models.IncompatibleRoles.objects.filter( - role_two=role - ).values_list("role_one", flat=True) + account_models.IncompatibleRoles.objects.filter(role_two=role).values_list("role_one", flat=True) ) error_forms = [ form_two.cleaned_data["role"].name @@ -130,9 +124,7 @@ def clean(self): if error_forms: if "role" not in form._errors: form._errors["role"] = ErrorList() - form._errors["role"].append( - _(f"{role.name} is incompatible with {', '.join(error_forms)}.") - ) + form._errors["role"].append(_(f"{role.name} is incompatible with {', '.join(error_forms)}.")) class UserRoleInline(admin.TabularInline): @@ -155,9 +147,7 @@ class UserRoleInline(admin.TabularInline): def get_valid_kobo_username(user: User): - return ( - user.username.replace("@", "_at_").replace(".", "_").replace("+", "_").lower() - ) + return user.username.replace("@", "_at_").replace(".", "_").replace("+", "_").lower() class DjAdminManager: @@ -192,11 +182,7 @@ def assert_response(self, 
status: [int], location: str = None, custom_error=""): self._last_error = self._last_response raise self.ResponseException(msg) - if ( - location - and (redir_to := self._last_response.headers.get("location", "N/A")) - != location - ): + if location and (redir_to := self._last_response.headers.get("location", "N/A")) != location: msg = f"Unexpected redirect:{redir_to} <> {location}: {custom_error}" self._last_error = self._last_response raise self.ResponseException(msg) @@ -290,10 +276,7 @@ def delete_user(self, username, pk): ]: self._get(url) self.assert_response([200, 404, 302], custom_error=url) - if ( - self._last_response.status_code == 302 - and "/login/" in self._last_response.headers["Location"] - ): + if self._last_response.status_code == 302 and "/login/" in self._last_response.headers["Location"]: raise Exception(f"Cannot access to {url}") if self._last_response.status_code == 200: @@ -316,9 +299,7 @@ def queryset(self, request, queryset): Q(custom_fields__kobo_pk__isnull=True) | Q(custom_fields__kobo_pk=None), ) elif self.value() == "1": - return queryset.filter(custom_fields__kobo_pk__isnull=False).exclude( - custom_fields__kobo_pk=None - ) + return queryset.filter(custom_fields__kobo_pk__isnull=False).exclude(custom_fields__kobo_pk=None) return queryset @@ -328,11 +309,7 @@ class BusinessAreaFilter(SimpleListFilter): template = "adminfilters/combobox.html" def lookups(self, request, model_admin): - return ( - BusinessArea.objects.filter(user_roles__isnull=False) - .values_list("id", "name") - .distinct() - ) + return BusinessArea.objects.filter(user_roles__isnull=False).values_list("id", "name").distinct() def queryset(self, request, queryset): if self.value(): @@ -457,9 +434,7 @@ def kobo_user(self, obj): return obj.custom_fields.get("kobo_username") def get_deleted_objects(self, objs, request): - to_delete, model_count, perms_needed, protected = super().get_deleted_objects( - objs, request - ) + to_delete, model_count, perms_needed, protected = super().get_deleted_objects(objs, request) user = objs[0] kobo_pk = user.custom_fields.get("kobo_pk", None) kobo_username = user.custom_fields.get("kobo_username", None) @@ -496,9 +471,7 @@ def delete_model(self, request, obj): if "kobo_username" in obj.custom_fields: api = DjAdminManager() api.login(request) - api.delete_user( - obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"] - ) + api.delete_user(obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"]) super().delete_model(request, obj) except Exception as e: logger.exception(e) @@ -516,9 +489,7 @@ def privileges(self, request, pk): for role in user.user_roles.all(): ba_roles[role.business_area.slug].append(role.role) - for role in user.user_roles.values_list( - "business_area__slug", flat=True - ).distinct("business_area"): + for role in user.user_roles.values_list("business_area__slug", flat=True).distinct("business_area"): ba_perms[role].extend(user.permissions_in_business_area(role)) context["business_ares_permissions"] = dict(ba_perms) @@ -550,31 +521,21 @@ def add_business_area_role(self, request, queryset): for role in roles: if crud == "ADD": try: - IncompatibleRoles.objects.validate_user_role( - u, ba, role - ) - ur, is_new = u.user_roles.get_or_create( - business_area=ba, role=role - ) + IncompatibleRoles.objects.validate_user_role(u, ba, role) + ur, is_new = u.user_roles.get_or_create(business_area=ba, role=role) if is_new: added += 1 self.log_addition(request, ur, "Role added") except ValidationError as e: self.message_user(request, str(e), 
messages.ERROR) elif crud == "REMOVE": - to_delete = u.user_roles.filter( - business_area=ba, role=role - ).first() + to_delete = u.user_roles.filter(business_area=ba, role=role).first() if to_delete: removed += 1 - self.log_deletion( - request, to_delete, str(to_delete) - ) + self.log_deletion(request, to_delete, str(to_delete)) to_delete.delete() else: - raise ValueError( - "Bug found. {} not valid operation for add/rem role" - ) + raise ValueError("Bug found. {} not valid operation for add/rem role") if removed: msg = f"{removed} roles removed from {users} users" elif added: @@ -587,9 +548,7 @@ def add_business_area_role(self, request, queryset): else: ctx = self.get_common_context(request, title="Add Role", selection=queryset) ctx["form"] = AddRoleForm() - return render( - request, "admin/account/user/business_area_role.html", context=ctx - ) + return render(request, "admin/account/user/business_area_role.html", context=ctx) add_business_area_role.short_description = "Add/Remove Business Area roles" @@ -624,9 +583,7 @@ def _grant_kobo_accesss_to_user(self, user, notify=True, sync=True): if res.status_code == 201 and notify: send_mail( "Kobo credentials", - KOBO_ACCESS_EMAIL.format( - email=user.email, password=password, kobo_url=settings.KOBO_KF_URL - ), + KOBO_ACCESS_EMAIL.format(email=user.email, password=password, kobo_url=settings.KOBO_KF_URL), settings.DEFAULT_FROM_EMAIL, [user.email], ) @@ -639,9 +596,7 @@ def create_kobo_user_qs(self, request, queryset): self._grant_kobo_accesss_to_user(request, user) except Exception as e: logger.exception(e) - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) self.message_user( request, f"User successfully `{user.username}` created on Kobo", @@ -655,14 +610,10 @@ def create_kobo_user_qs(self, request, queryset): def create_kobo_user(self, request, pk): try: self._grant_kobo_accesss_to_user(self.get_queryset(request).get(pk=pk)) - self.message_user( - request, f"Granted access to {settings.KOBO_KF_URL}", messages.SUCCESS - ) + self.message_user(request, f"Granted access to {settings.KOBO_KF_URL}", messages.SUCCESS) except Exception as e: logger.exception(e) - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) @button( permission="account.can_create_kobo_user", @@ -672,9 +623,7 @@ def remove_kobo_access(self, request, pk): try: obj = self.get_object(request, pk) api = DjAdminManager() - api.delete_user( - obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"] - ) + api.delete_user(obj.custom_fields["kobo_username"], obj.custom_fields["kobo_pk"]) obj.custom_fields["kobo_username"] = None obj.custom_fields["kobo_pk"] = None obj.save() @@ -685,9 +634,7 @@ def remove_kobo_access(self, request, pk): ) except Exception as e: logger.exception(e) - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) @button(label="Import CSV", permission="account.can_upload_to_kobo") def import_csv(self, request): @@ -709,10 +656,7 @@ def import_csv(self, request): role = form.cleaned_data["role"] if csv_file.multiple_chunks(): - raise Exception( - "Uploaded file is too big (%.2f MB)" - % (csv_file.size(1000 * 1000)) - ) + raise Exception("Uploaded file is too big (%.2f MB)" % (csv_file.size(1000 * 1000))) data_set 
= csv_file.read().decode("utf-8-sig").splitlines() reader = csv.DictReader( data_set, @@ -730,9 +674,7 @@ def import_csv(self, request): try: email = row["email"].strip() except Exception as e: - raise Exception( - f"{e.__class__.__name__}: {e} on `{row}`" - ) + raise Exception(f"{e.__class__.__name__}: {e} on `{row}`") user_info = { "email": email, @@ -743,36 +685,21 @@ def import_csv(self, request): if "username" in row: username = row["username"].strip() else: - username = ( - row["email"] - .replace("@", "_") - .replace(".", "_") - .lower() - ) + username = row["email"].replace("@", "_").replace(".", "_").lower() u, isnew = account_models.User.objects.get_or_create( email=email, partner=partner, defaults={"username": username}, ) if isnew: - ur = u.user_roles.create( - business_area=business_area, role=role - ) - self.log_addition( - request, u, "User imported by CSV" - ) + ur = u.user_roles.create(business_area=business_area, role=role) + self.log_addition(request, u, "User imported by CSV") self.log_addition(request, ur, "User Role added") else: # check role validity try: - IncompatibleRoles.objects.validate_user_role( - u, business_area, role - ) - u.user_roles.get_or_create( - business_area=business_area, role=role - ) - self.log_addition( - request, ur, "User Role added" - ) + IncompatibleRoles.objects.validate_user_role(u, business_area, role) + u.user_roles.get_or_create(business_area=business_area, role=role) + self.log_addition(request, ur, "User Role added") except ValidationError as e: self.message_user( request, @@ -790,13 +717,9 @@ def import_csv(self, request): logger.exception(e) context["form"] = form context["errors"] = [str(e)] - self.message_user( - request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR - ) + self.message_user(request, f"{e.__class__.__name__}: {str(e)}", messages.ERROR) else: - self.message_user( - request, "Please correct errors below", messages.ERROR - ) + self.message_user(request, "Please correct errors below", messages.ERROR) context["form"] = form fs = form._fieldsets or [(None, {"fields": form.base_fields})] context["adminform"] = AdminForm(form, fieldsets=fs, prepopulated_fields={}) @@ -895,9 +818,7 @@ def sync_multi(self, request): def sync_single(self, request, pk): try: self._sync_ad_data(self.get_object(request, pk)) - self.message_user( - request, "Active Directory data successfully fetched", messages.SUCCESS - ) + self.message_user(request, "Active Directory data successfully fetched", messages.SUCCESS) except Exception as e: logger.exception(e) self.message_user(request, str(e), messages.ERROR) @@ -926,11 +847,7 @@ def load_ad_users(self, request): business_area = form.cleaned_data["business_area"] users_to_bulk_create = [] users_role_to_bulk_create = [] - existing = set( - account_models.User.objects.filter(email__in=emails).values_list( - "email", flat=True - ) - ) + existing = set(account_models.User.objects.filter(email__in=emails).values_list("email", flat=True)) results = self.Results([], [], [], []) try: ms_graph = MicrosoftGraphAPI() @@ -942,9 +859,7 @@ def load_ad_users(self, request): results.updated.append(user) else: user_data = ms_graph.get_user_data(email=email) - user_args = build_arg_dict_from_dict( - user_data, DJANGO_USER_MAP - ) + user_args = build_arg_dict_from_dict(user_data, DJANGO_USER_MAP) user = account_models.User(**user_args) if user.first_name is None: user.first_name = "" @@ -955,12 +870,8 @@ def load_ad_users(self, request): user.job_title = job_title user.set_unusable_password() 
users_to_bulk_create.append(user) - global_business_area = BusinessArea.objects.filter( - slug="global" - ).first() - basic_role = account_models.Role.objects.filter( - name="Basic User" - ).first() + global_business_area = BusinessArea.objects.filter(slug="global").first() + basic_role = account_models.Role.objects.filter(name="Basic User").first() if global_business_area and basic_role: users_role_to_bulk_create.append( account_models.UserRole( @@ -972,9 +883,7 @@ def load_ad_users(self, request): results.created.append(user) users_role_to_bulk_create.append( - account_models.UserRole( - role=role, business_area=business_area, user=user - ) + account_models.UserRole(role=role, business_area=business_area, user=user) ) except HTTPError as e: if e.response.status_code != 404: @@ -983,9 +892,7 @@ def load_ad_users(self, request): except Http404: results.missing.append(email) account_models.User.objects.bulk_create(users_to_bulk_create) - account_models.UserRole.objects.bulk_create( - users_role_to_bulk_create, ignore_conflicts=True - ) + account_models.UserRole.objects.bulk_create(users_role_to_bulk_create, ignore_conflicts=True) ctx["results"] = results return TemplateResponse(request, "admin/load_users.html", ctx) except Exception as e: @@ -1124,9 +1031,7 @@ class IncompatibleRolesAdmin(HOPEModelAdminBase): class GroupResource(resources.ModelResource): - permissions = fields.Field( - widget=ManyToManyWidget(Permission, field="codename"), attribute="permissions" - ) + permissions = fields.Field(widget=ManyToManyWidget(Permission, field="codename"), attribute="permissions") class Meta: model = Group @@ -1146,11 +1051,7 @@ def import_fixture(self, request): return _import_fixture(self, request) def _perms(self, request, object_id) -> set: - return set( - self.get_object(request, object_id).permissions.values_list( - "codename", flat=True - ) - ) + return set(self.get_object(request, object_id).permissions.values_list("codename", flat=True)) @button() def users(self, request, pk): diff --git a/backend/hct_mis_api/apps/activity_log/models.py b/backend/hct_mis_api/apps/activity_log/models.py index 9b7dfa8db2..10cbd1e8c1 100644 --- a/backend/hct_mis_api/apps/activity_log/models.py +++ b/backend/hct_mis_api/apps/activity_log/models.py @@ -11,9 +11,7 @@ from hct_mis_api.apps.core.utils import nested_getattr -def log_create( - mapping, business_area_field, user=None, old_object=None, new_object=None -): +def log_create(mapping, business_area_field, user=None, old_object=None, new_object=None): if new_object: instance = new_object else: @@ -80,13 +78,9 @@ class LogEntry(models.Model): related_name="logs", verbose_name=_("actor"), ) - business_area = models.ForeignKey( - "core.BusinessArea", on_delete=models.SET_NULL, null=True - ) + business_area = models.ForeignKey("core.BusinessArea", on_delete=models.SET_NULL, null=True) - timestamp = models.DateTimeField( - auto_now_add=True, verbose_name=_("timestamp"), db_index=True - ) + timestamp = models.DateTimeField(auto_now_add=True, verbose_name=_("timestamp"), db_index=True) class Meta: get_latest_by = "timestamp" diff --git a/backend/hct_mis_api/apps/core/base_test_case.py b/backend/hct_mis_api/apps/core/base_test_case.py index fc9ddd84ec..1664374409 100644 --- a/backend/hct_mis_api/apps/core/base_test_case.py +++ b/backend/hct_mis_api/apps/core/base_test_case.py @@ -50,15 +50,11 @@ def generate_context(self, user=None, files=None): return context_value def generate_document_types_for_all_countries(self): - identification_type_choice = tuple( - 
(doc_type, label) for doc_type, label in IDENTIFICATION_TYPE_CHOICE - ) + identification_type_choice = tuple((doc_type, label) for doc_type, label in IDENTIFICATION_TYPE_CHOICE) document_types = [] for alpha2 in COUNTRIES: for doc_type, label in identification_type_choice: - document_types.append( - DocumentType(country=alpha2, label=label, type=doc_type) - ) + document_types.append(DocumentType(country=alpha2, label=label, type=doc_type)) DocumentType.objects.bulk_create(document_types, ignore_conflicts=True) @@ -78,9 +74,7 @@ def create_user_role_with_permissions(user, permissions, business_area): role, created = Role.objects.update_or_create( name="Role with Permissions", defaults={"permissions": permission_list} ) - user_role, _ = UserRole.objects.get_or_create( - user=user, role=role, business_area=business_area - ) + user_role, _ = UserRole.objects.get_or_create(user=user, role=role, business_area=business_area) return user_role diff --git a/backend/hct_mis_api/apps/core/countries.py b/backend/hct_mis_api/apps/core/countries.py index e105e89736..2c824dec31 100644 --- a/backend/hct_mis_api/apps/core/countries.py +++ b/backend/hct_mis_api/apps/core/countries.py @@ -10,7 +10,6 @@ class Countries: - @classmethod @lru_cache(maxsize=None) def get_countries(cls): diff --git a/backend/hct_mis_api/apps/core/datamart/api.py b/backend/hct_mis_api/apps/core/datamart/api.py index d9d74da0fc..5de8b6bcef 100644 --- a/backend/hct_mis_api/apps/core/datamart/api.py +++ b/backend/hct_mis_api/apps/core/datamart/api.py @@ -14,9 +14,7 @@ class DatamartAPI: PAGE_SIZE = 100 - LOCATIONS_ENDPOINT = ( - "/api/latest/datamart/locations/?-serializer=geo&format=json&ordering=id" - ) + LOCATIONS_ENDPOINT = "/api/latest/datamart/locations/?-serializer=geo&format=json&ordering=id" def __init__(self): self._client = requests.session() @@ -45,9 +43,7 @@ def get_location(self, id): url = f"/api/latest/datamart/locations/{id}/" return self._handle_get_request(url) - def get_locations( - self, *, country=None, gis=False, max_records=None, page_size=None - ): + def get_locations(self, *, country=None, gis=False, max_records=None, page_size=None): url = f"/api/latest/datamart/locations/?&ordering=id,page_size={page_size or self.PAGE_SIZE}" if country: url = f"{url}&country_name={country}" @@ -81,9 +77,7 @@ def _features_to_multi_polygon(self, geometry): if geometry_type != "MultiPolygon": logger.error("Geometry type should be MultiPolygon") raise ValidationError("Geometry type should be MultiPolygon") - return MultiPolygon( - [Polygon(polygon) for polygon in geometry.get("coordinates")[0]] - ) + return MultiPolygon([Polygon(polygon) for polygon in geometry.get("coordinates")[0]]) def generate_admin_areas(self, locations, business_area): self.generate_admin_areas_old_models(locations, business_area) @@ -117,9 +111,7 @@ def generate_admin_areas_old_models(self, locations, business_area): admin_area.title = properties.get("name") admin_area.admin_area_level = admin_area_level admin_area.p_code = properties.get("p_code") - admin_area.point = Point( - properties.get("longitude"), properties.get("latitude") - ) + admin_area.point = Point(properties.get("longitude"), properties.get("latitude")) admin_area.geom = self._features_to_multi_polygon(location.get("geometry")) admin_areas_to_create.append(admin_area) admin_areas_external_id_dict[external_id] = admin_area @@ -151,9 +143,7 @@ def generate_admin_areas_new_models(self, locations, business_area): external_id = location.get("id") admin_area_level = 
admin_area_level_dict.get(gateway) if admin_area_level is None: - admin_area_level = AreaType.objects.filter( - area_level=gateway, country__name=business_area.name - ).first() + admin_area_level = AreaType.objects.filter(area_level=gateway, country__name=business_area.name).first() if admin_area_level is None: country = Country.objects.get(name=business_area.name) admin_area_level = AreaType( @@ -163,17 +153,13 @@ def generate_admin_areas_new_models(self, locations, business_area): ) admin_area_level_dict[gateway] = admin_area_level - admin_area = Area.objects.filter( - area_type=admin_area_level, name=properties.get("name") - ).first() + admin_area = Area.objects.filter(area_type=admin_area_level, name=properties.get("name")).first() if admin_area is None: admin_area = Area() admin_area.name = properties.get("name") admin_area.area_type = admin_area_level admin_area.p_code = properties.get("p_code") - admin_area.point = Point( - properties.get("longitude"), properties.get("latitude") - ) + admin_area.point = Point(properties.get("longitude"), properties.get("latitude")) admin_area.geom = self._features_to_multi_polygon(location.get("geometry")) admin_areas_to_create.append(admin_area) admin_areas_external_id_dict[external_id] = admin_area diff --git a/backend/hct_mis_api/apps/core/exchange_rates/models.py b/backend/hct_mis_api/apps/core/exchange_rates/models.py index be74f4f519..3d18f4abdd 100644 --- a/backend/hct_mis_api/apps/core/exchange_rates/models.py +++ b/backend/hct_mis_api/apps/core/exchange_rates/models.py @@ -7,9 +7,7 @@ class HistoryExchangeRate: - def __init__( - self, VALID_FROM: str, VALID_TO: str, PAST_XRATE: str, PAST_RATIO: str - ): + def __init__(self, VALID_FROM: str, VALID_TO: str, PAST_XRATE: str, PAST_RATIO: str): self.valid_from = parse(VALID_FROM) self.valid_to = parse(VALID_TO) self.past_xrate = float(PAST_XRATE) @@ -41,9 +39,7 @@ def __init__( self.currency_name = CURRENCY_NAME self.x_rate = float(X_RATE) self.valid_from = parse(VALID_FROM) - self.valid_to = ( - datetime(9999, 12, 31) if VALID_TO == "31-DEC-99" else parse(VALID_TO) - ) + self.valid_to = datetime(9999, 12, 31) if VALID_TO == "31-DEC-99" else parse(VALID_TO) self.ratio = float(RATIO) self.no_of_decimal = int(NO_OF_DECIMAL) @@ -53,16 +49,12 @@ def __init__( else: past_xrates.reverse() - self.historical_exchange_rates = [ - HistoryExchangeRate(**past_xrate) for past_xrate in past_xrates - ] + self.historical_exchange_rates = [HistoryExchangeRate(**past_xrate) for past_xrate in past_xrates] def __repr__(self): return f"SingleExchangeRate(currency_code: {self.currency_code}, ratio: {self.ratio}, x_rate: {self.x_rate})" - def get_exchange_rate_by_dispersion_date( - self, dispersion_date: datetime - ) -> Optional[float]: + def get_exchange_rate_by_dispersion_date(self, dispersion_date: datetime) -> Optional[float]: today = datetime.now() dispersion_date_is_not_provided = dispersion_date is None @@ -70,21 +62,14 @@ def get_exchange_rate_by_dispersion_date( return self.x_rate * self.ratio dispersion_date_is_in_current_date_range = ( - self.valid_from - <= dispersion_date - <= (today if self.valid_to is None else self.valid_to) + self.valid_from <= dispersion_date <= (today if self.valid_to is None else self.valid_to) ) if dispersion_date_is_in_current_date_range: return self.x_rate * self.ratio for historical_exchange_rate in self.historical_exchange_rates: - if historical_exchange_rate.is_valid_for_provided_dispersion_date( - dispersion_date - ): - return ( - historical_exchange_rate.past_xrate - * 
historical_exchange_rate.past_ratio - ) + if historical_exchange_rate.is_valid_for_provided_dispersion_date(dispersion_date): + return historical_exchange_rate.past_xrate * historical_exchange_rate.past_ratio return None diff --git a/backend/hct_mis_api/apps/core/flex_fields_importer.py b/backend/hct_mis_api/apps/core/flex_fields_importer.py index 638eaf0ec7..a643b9f60c 100644 --- a/backend/hct_mis_api/apps/core/flex_fields_importer.py +++ b/backend/hct_mis_api/apps/core/flex_fields_importer.py @@ -45,17 +45,11 @@ class FlexibleAttributeImporter: } # Constants for xls import - ATTRIBUTE_MODEL_FIELDS = [ - field.name for field in FlexibleAttribute._meta.get_fields() - ] + ATTRIBUTE_MODEL_FIELDS = [field.name for field in FlexibleAttribute._meta.get_fields()] - GROUP_MODEL_FIELDS = [ - field.name for field in FlexibleAttributeGroup._meta.get_fields() - ] + GROUP_MODEL_FIELDS = [field.name for field in FlexibleAttributeGroup._meta.get_fields()] - CHOICE_MODEL_FIELDS = [ - field.name for field in FlexibleAttributeChoice._meta.get_fields() - ] + CHOICE_MODEL_FIELDS = [field.name for field in FlexibleAttributeChoice._meta.get_fields()] CORE_FIELD_SUFFIXES = ( "_h_c", @@ -85,9 +79,7 @@ def _get_model_fields(self, object_type_to_add): "choice": self.CHOICE_MODEL_FIELDS, }.get(object_type_to_add) - def _assign_field_values( - self, value, header_name, object_type_to_add, row, row_number - ): + def _assign_field_values(self, value, header_name, object_type_to_add, row, row_number): model_fields = self._get_model_fields(object_type_to_add) if any(header_name.startswith(i) for i in self.JSON_MODEL_FIELDS): @@ -109,27 +101,16 @@ def _assign_field_values( field_suffix = row[1].value[-4:] is_empty_and_not_index_field = not value and not is_index_field is_core_or_flex_field = ( - field_suffix in self.CORE_FIELD_SUFFIXES - or field_suffix in self.FLEX_FIELD_SUFFIXES + field_suffix in self.CORE_FIELD_SUFFIXES or field_suffix in self.FLEX_FIELD_SUFFIXES ) if is_empty_and_not_index_field and is_core_or_flex_field: - logger.error( - f"Survey Sheet: Row {row_number + 1}: English label cannot be empty" - ) - raise ValidationError( - f"Survey Sheet: Row {row_number + 1}: English label cannot be empty" - ) + logger.error(f"Survey Sheet: Row {row_number + 1}: English label cannot be empty") + raise ValidationError(f"Survey Sheet: Row {row_number + 1}: English label cannot be empty") if object_type_to_add == "choice" and not value: - logger.error( - f"Choices Sheet: Row {row_number + 1}: English label cannot be empty" - ) - raise ValidationError( - f"Choices Sheet: Row {row_number + 1}: English label cannot be empty" - ) + logger.error(f"Choices Sheet: Row {row_number + 1}: English label cannot be empty") + raise ValidationError(f"Choices Sheet: Row {row_number + 1}: English label cannot be empty") - self.json_fields_to_create[label].update( - {language: cleared_value if value else ""} - ) + self.json_fields_to_create[label].update({language: cleared_value if value else ""}) return if header_name == "required": @@ -142,19 +123,13 @@ def _assign_field_values( if header_name in model_fields: if header_name == "type": if not value: - logger.error( - f"Survey Sheet: Row {row_number + 1}: Type is required" - ) - raise ValidationError( - f"Survey Sheet: Row {row_number + 1}: Type is required" - ) + logger.error(f"Survey Sheet: Row {row_number + 1}: Type is required") + raise ValidationError(f"Survey Sheet: Row {row_number + 1}: Type is required") choice_key = value.split(" ")[0] if choice_key == "calculate": 
self.object_fields_to_create["type"] = "calculate" elif choice_key in self.TYPE_CHOICE_MAP.keys(): - self.object_fields_to_create["type"] = self.TYPE_CHOICE_MAP.get( - choice_key - ) + self.object_fields_to_create["type"] = self.TYPE_CHOICE_MAP.get(choice_key) else: is_attribute_name_empty = header_name == "name" and value in (None, "") is_choice_list_name_empty = ( @@ -162,29 +137,18 @@ def _assign_field_values( ) and not value if is_attribute_name_empty: - logger.error( - f"Survey Sheet: Row {row_number + 1}: Name is required" - ) - raise ValidationError( - f"Survey Sheet: Row {row_number + 1}: Name is required" - ) + logger.error(f"Survey Sheet: Row {row_number + 1}: Name is required") + raise ValidationError(f"Survey Sheet: Row {row_number + 1}: Name is required") if is_choice_list_name_empty: - logger.error( - f"Survey Sheet: Row {row_number + 1}: List Name is required" - ) - raise ValidationError( - f"Survey Sheet: Row {row_number + 1}: List Name is required" - ) + logger.error(f"Survey Sheet: Row {row_number + 1}: List Name is required") + raise ValidationError(f"Survey Sheet: Row {row_number + 1}: List Name is required") self.object_fields_to_create[header_name] = value if value else "" is_valid_calculate_field_and_header_is_calculate_field_type = ( object_type_to_add == "attribute" and header_name == "calculated_result_field_type" and row[0].value == "calculate" - and any( - self.object_fields_to_create["name"].endswith(i) - for i in self.FLEX_FIELD_SUFFIXES - ) + and any(self.object_fields_to_create["name"].endswith(i) for i in self.FLEX_FIELD_SUFFIXES) ) if is_valid_calculate_field_and_header_is_calculate_field_type: choice_key = value.strip() if value and isinstance(value, str) else None @@ -204,14 +168,12 @@ def _assign_field_values( logger.error(validation_error_message) raise ValidationError(validation_error_message) else: - self.object_fields_to_create["type"] = self.CALCULATE_TYPE_CHOICE_MAP[ - choice_key - ] + self.object_fields_to_create["type"] = self.CALCULATE_TYPE_CHOICE_MAP[choice_key] def _can_add_row(self, row): - is_core_field = any( - row[1].value.endswith(i) for i in self.CORE_FIELD_SUFFIXES - ) and not row[0].value.endswith("_group") + is_core_field = any(row[1].value.endswith(i) for i in self.CORE_FIELD_SUFFIXES) and not row[0].value.endswith( + "_group" + ) is_in_excluded = row[0].value in self.EXCLUDED_MODEL_FIELDS @@ -300,9 +262,7 @@ def _handle_choices(self, sheets): to_create_choices, ) - choices_to_delete = set(choices_from_db).difference( - set(created_choices + updated_choices) - ) + choices_to_delete = set(choices_from_db).difference(set(created_choices + updated_choices)) for choice in choices_to_delete: choice.delete() @@ -325,11 +285,7 @@ def _handle_groups_and_fields(self, sheet): if all([cell.ctype == xlrd.XL_CELL_EMPTY for cell in row]): continue - object_type_to_add = ( - "group" - if row[0].value in ("begin_group", "begin_repeat") - else "attribute" - ) + object_type_to_add = "group" if row[0].value in ("begin_group", "begin_repeat") else "attribute" repeatable = True if row[0].value == "begin_repeat" else False self._reset_model_fields_variables() @@ -347,10 +303,7 @@ def _handle_groups_and_fields(self, sheet): row_number, ) - is_flex_field = any( - self.object_fields_to_create["name"].endswith(i) - for i in self.FLEX_FIELD_SUFFIXES - ) + is_flex_field = any(self.object_fields_to_create["name"].endswith(i) for i in self.FLEX_FIELD_SUFFIXES) if object_type_to_add == "group": obj = FlexibleAttributeGroup.all_objects.filter( @@ -396,13 +349,9 @@ 
def _handle_groups_and_fields(self, sheet): parent = None if obj: - if ( - obj.type != self.object_fields_to_create["type"] - and not obj.is_removed - ): + if obj.type != self.object_fields_to_create["type"] and not obj.is_removed: logger.error( - f"Survey Sheet: Row {row_number + 1}: Type of the " - f"attribute cannot be changed!" + f"Survey Sheet: Row {row_number + 1}: Type of the " f"attribute cannot be changed!" ) raise ValidationError( f"Survey Sheet: Row {row_number + 1}: Type of the attribute cannot be changed!" diff --git a/backend/hct_mis_api/apps/core/kobo/common.py b/backend/hct_mis_api/apps/core/kobo/common.py index 5882f118c3..fd3521bda2 100644 --- a/backend/hct_mis_api/apps/core/kobo/common.py +++ b/backend/hct_mis_api/apps/core/kobo/common.py @@ -62,11 +62,7 @@ def get_field_name(field_name: str) -> str: def reduce_assets_list(assets: list, deployed: bool = True, *args, **kwarg) -> list: if deployed: - return [ - reduce_asset(asset) - for asset in assets - if asset["has_deployment"] and asset["deployment__active"] - ] + return [reduce_asset(asset) for asset in assets if asset["has_deployment"] and asset["deployment__active"]] return [reduce_asset(asset) for asset in assets] @@ -88,9 +84,7 @@ def count_population(results: list, business_area: BusinessArea) -> tuple[int, i if business_area.get_sys_option("ignore_amended_kobo_submissions"): submission_meta_data["amended"] = False - submission_exists = KoboImportedSubmission.objects.filter( - **submission_meta_data - ).exists() + submission_exists = KoboImportedSubmission.objects.filter(**submission_meta_data).exists() if submission_exists is False: total_households_count += 1 for individual_data in result[KOBO_FORM_INDIVIDUALS_COLUMN_NAME]: @@ -112,10 +106,7 @@ def count_population(results: list, business_area: BusinessArea) -> tuple[int, i seen_hash_keys.append(hash_key) total_individuals_count += 1 if ( - reduced_submission.get( - "relationship_i_c", RELATIONSHIP_UNKNOWN - ).upper() - == NON_BENEFICIARY + reduced_submission.get("relationship_i_c", RELATIONSHIP_UNKNOWN).upper() == NON_BENEFICIARY and seen_hash_keys.count(hash_key) > 1 ): total_individuals_count -= 1 @@ -126,8 +117,6 @@ def count_population(results: list, business_area: BusinessArea) -> tuple[int, i def filter_by_owner(data, business_area): kobo_username = business_area.kobo_username if isinstance(data, list): - return [ - element for element in data if element["owner__username"] == kobo_username - ] + return [element for element in data if element["owner__username"] == kobo_username] if data["owner__username"] == kobo_username: return data diff --git a/backend/hct_mis_api/apps/core/management/commands/upgrade.py b/backend/hct_mis_api/apps/core/management/commands/upgrade.py index 48ba6f8e9a..08488ef679 100644 --- a/backend/hct_mis_api/apps/core/management/commands/upgrade.py +++ b/backend/hct_mis_api/apps/core/management/commands/upgrade.py @@ -12,18 +12,24 @@ def handle(self, *args, **options): from hct_mis_api.apps.power_query.models import Formatter - Formatter.objects.get_or_create(name='Dataset To HTML', - defaults={"code": ''' + Formatter.objects.get_or_create( + name="Dataset To HTML", + defaults={ + "code": """ <h1>{{dataset.query.name}}</h1> <table> <tr>{% for fname in dataset.data.headers %}<th>{{ fname }}</th>{% endfor %}</tr> {% for row in dataset.data %}<tr>{% for col in row %}<td>{{ col }}</td>{% endfor %}</tr> {% endfor %} </table> -'''}) +""" + }, + ) - Formatter.objects.get_or_create(name='Queryset To HTML', - defaults={"code": ''' + 
Formatter.objects.get_or_create( + name="Queryset To HTML", + defaults={ + "code": """ <h1>{{dataset.query.name}}</h1> <table> <tr><th>id</th><th>str</th></tr> @@ -33,9 +39,9 @@ def handle(self, *args, **options): </tr> {% endfor %} </table> -''', - 'content_type': 'html'}) +""", + "content_type": "html", + }, + ) - Formatter.objects.get_or_create(name='Dataset To XLS', - defaults={"code": '', - 'content_type': 'xls'}) + Formatter.objects.get_or_create(name="Dataset To XLS", defaults={"code": "", "content_type": "xls"}) diff --git a/backend/hct_mis_api/apps/core/mis_test_runner.py b/backend/hct_mis_api/apps/core/mis_test_runner.py index 32527210f4..6d0536d70f 100644 --- a/backend/hct_mis_api/apps/core/mis_test_runner.py +++ b/backend/hct_mis_api/apps/core/mis_test_runner.py @@ -7,9 +7,7 @@ from snapshottest.django import TestRunner -def create_test_db_and_schemas( - creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False -): +def create_test_db_and_schemas(creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False): """ Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created. @@ -61,9 +59,7 @@ def create_test_db_and_schemas( # who are testing on databases without transactions or who are using # a TransactionTestCase still get a clean database on every test run. if serialize: - creation.connection._test_serialized_contents = ( - creation.serialize_db_to_string() - ) + creation.connection._test_serialized_contents = creation.serialize_db_to_string() call_command("createcachetable", database=creation.connection.alias) @@ -73,9 +69,7 @@ def create_test_db_and_schemas( return test_database_name -def create_fake_test_db( - creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False -): +def create_fake_test_db(creation, verbosity=1, autoclobber=False, serialize=True, keepdb=False): """ Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created. @@ -123,15 +117,7 @@ def create_fake_test_db( return test_database_name -def _setup_schema_database( - verbosity, - interactive, - keepdb=False, - debug_sql=False, - parallel=0, - alias=None, - **kwargs -): +def _setup_schema_database(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, alias=None, **kwargs): """Create the test databases.""" connection = connections[alias] @@ -214,9 +200,7 @@ def setup_databases(self, **kwargs): read_only = connection.settings_dict.get("TEST", {}).get("READ_ONLY", False) if read_only: if self.verbosity >= 1: - connection.creation.log( - "Skipping ReadOnly test database for alias '%s'..." % alias - ) + connection.creation.log("Skipping ReadOnly test database for alias '%s'..." 
% alias) aliases = kwargs.get("aliases") aliases.discard(alias) continue @@ -247,9 +231,7 @@ def setup_databases(self, **kwargs): verbosity=self.verbosity, autoclobber=not self.interactive, keepdb=self.keepdb, - serialize=connection.settings_dict.get("TEST", {}).get( - "SERIALIZE", True - ), + serialize=connection.settings_dict.get("TEST", {}).get("SERIALIZE", True), ) old_names.extend(super().setup_databases(**kwargs)) diff --git a/backend/hct_mis_api/apps/geo/models.py b/backend/hct_mis_api/apps/geo/models.py index 50ccc19ed5..a2af5cc4c8 100644 --- a/backend/hct_mis_api/apps/geo/models.py +++ b/backend/hct_mis_api/apps/geo/models.py @@ -94,9 +94,7 @@ class Area(MPTTModel, UpgradeModel, TimeStampedUUIDModel): on_delete=models.CASCADE, verbose_name=_("Parent"), ) - p_code = models.CharField( - max_length=32, blank=True, null=True, verbose_name="P Code" - ) + p_code = models.CharField(max_length=32, blank=True, null=True, verbose_name="P Code") area_type = models.ForeignKey(AreaType, on_delete=models.CASCADE) geom = models.MultiPolygonField(null=True, blank=True) diff --git a/backend/hct_mis_api/apps/grievance/mutations.py b/backend/hct_mis_api/apps/grievance/mutations.py index 5b9e1188ea..4930ea6c86 100644 --- a/backend/hct_mis_api/apps/grievance/mutations.py +++ b/backend/hct_mis_api/apps/grievance/mutations.py @@ -275,9 +275,7 @@ def mutate(cls, root, info, input, **kwargs): save_extra_method = save_extra_methods.get(category) grievances = [grievance_ticket] if save_extra_method: - grievances = save_extra_method( - root, info, input, grievance_ticket, extras, **kwargs - ) + grievances = save_extra_method(root, info, input, grievance_ticket, extras, **kwargs) for grievance in grievances: log_create( GrievanceTicket.ACTIVITY_LOG_MAPPING, @@ -294,14 +292,10 @@ def save_basic_data(cls, root, info, input, **kwargs): user = info.context.user assigned_to_id = decode_id_string(arg("assigned_to")) linked_tickets_encoded_ids = arg("linked_tickets", []) - linked_tickets = [ - decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids - ] + linked_tickets = [decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids] business_area_slug = arg("business_area") extras = arg("extras", {}) - remove_parsed_data_fields( - input, ("linked_tickets", "extras", "business_area", "assigned_to") - ) + remove_parsed_data_fields(input, ("linked_tickets", "extras", "business_area", "assigned_to")) admin = input.pop("admin", None) admin_object = None admin_object_new = None @@ -321,9 +315,7 @@ def save_basic_data(cls, root, info, input, **kwargs): status=GrievanceTicket.STATUS_ASSIGNED, ) GrievanceNotification.send_all_notifications( - GrievanceNotification.prepare_notification_for_ticket_creation( - grievance_ticket - ) + GrievanceNotification.prepare_notification_for_ticket_creation(grievance_ticket) ) grievance_ticket.linked_tickets.set(linked_tickets) return grievance_ticket, extras @@ -404,23 +396,15 @@ class Arguments: @transaction.atomic def mutate(cls, root, info, input, **kwargs): arg = lambda name, default=None: input.get(name, default) - old_grievance_ticket = get_object_or_404( - GrievanceTicket, id=decode_id_string(arg("ticket_id")) - ) - grievance_ticket = get_object_or_404( - GrievanceTicket, id=decode_id_string(arg("ticket_id")) - ) + old_grievance_ticket = get_object_or_404(GrievanceTicket, id=decode_id_string(arg("ticket_id"))) + grievance_ticket = get_object_or_404(GrievanceTicket, id=decode_id_string(arg("ticket_id"))) household, individual = None, None if 
arg("household") is not None: - household = get_object_or_404( - Household, id=decode_id_string(arg("household")) - ) + household = get_object_or_404(Household, id=decode_id_string(arg("household"))) if arg("individual") is not None: - individual = get_object_or_404( - Individual, id=decode_id_string(arg("individual")) - ) + individual = get_object_or_404(Individual, id=decode_id_string(arg("individual"))) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) business_area = grievance_ticket.business_area @@ -440,9 +424,7 @@ def mutate(cls, root, info, input, **kwargs): if grievance_ticket.issue_type: verify_required_arguments(input, "issue_type", cls.EXTRAS_OPTIONS) - grievance_ticket, extras = cls.update_basic_data( - root, info, input, grievance_ticket, **kwargs - ) + grievance_ticket, extras = cls.update_basic_data(root, info, input, grievance_ticket, **kwargs) if cls.has_creator_or_owner_permission( info, @@ -460,9 +442,7 @@ def mutate(cls, root, info, input, **kwargs): category = grievance_ticket.category update_extra_method = update_extra_methods.get(category) if update_extra_method: - grievance_ticket = update_extra_method( - root, info, input, grievance_ticket, extras, **kwargs - ) + grievance_ticket = update_extra_method(root, info, input, grievance_ticket, extras, **kwargs) update_extra_methods = { GrievanceTicket.CATEGORY_REFERRAL: update_referral_extras, @@ -471,9 +451,7 @@ def mutate(cls, root, info, input, **kwargs): } update_extra_method = update_extra_methods.get(grievance_ticket.category) if update_extra_method: - grievance_ticket = update_extra_method( - root, info, input, grievance_ticket, extras, **kwargs - ) + grievance_ticket = update_extra_method(root, info, input, grievance_ticket, extras, **kwargs) if grievance_ticket.category in [ GrievanceTicket.CATEGORY_SENSITIVE_GRIEVANCE, @@ -508,9 +486,7 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): arg = lambda name, default=None: input.get(name, default) assigned_to_id = decode_id_string(arg("assigned_to")) linked_tickets_encoded_ids = arg("linked_tickets", []) - linked_tickets = [ - decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids - ] + linked_tickets = [decode_id_string(encoded_id) for encoded_id in linked_tickets_encoded_ids] extras = arg("extras", {}) remove_parsed_data_fields(input, ("linked_tickets", "extras", "assigned_to")) assigned_to = get_object_or_404(get_user_model(), id=assigned_to_id) @@ -520,10 +496,7 @@ def update_basic_data(cls, root, info, input, grievance_ticket, **kwargs): setattr(grievance_ticket, field, value) if assigned_to != grievance_ticket.assigned_to: - if ( - grievance_ticket.status == GrievanceTicket.STATUS_NEW - and grievance_ticket.assigned_to is None - ): + if grievance_ticket.status == GrievanceTicket.STATUS_NEW and grievance_ticket.assigned_to is None: grievance_ticket.status = GrievanceTicket.STATUS_ASSIGNED grievance_ticket.assigned_to = assigned_to if grievance_ticket.status in ( @@ -687,9 +660,7 @@ class Arguments: @classmethod def get_close_function(cls, category, issue_type): - function_or_nested_issue_types = ( - cls.CATEGORY_ISSUE_TYPE_TO_CLOSE_FUNCTION_MAPPING.get(category) - ) + function_or_nested_issue_types = cls.CATEGORY_ISSUE_TYPE_TO_CLOSE_FUNCTION_MAPPING.get(category) if isinstance(function_or_nested_issue_types, dict) and issue_type: return function_or_nested_issue_types.get(issue_type) return function_or_nested_issue_types @@ -699,9 +670,7 @@ def get_close_function(cls, category, 
issue_type): @transaction.atomic def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): grievance_ticket_id = decode_id_string(grievance_ticket_id) - old_grievance_ticket = get_object_or_404( - GrievanceTicket, id=grievance_ticket_id - ) + old_grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) if grievance_ticket.status == status: @@ -710,13 +679,9 @@ def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): if cls.MOVE_TO_STATUS_PERMISSION_MAPPING.get(status): if cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("feedback"): if grievance_ticket.is_feedback: - permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[ - status - ].get("feedback") + permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("feedback") else: - permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[ - status - ].get("any") + permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get("any") else: permissions_to_use = cls.MOVE_TO_STATUS_PERMISSION_MAPPING[status].get( grievance_ticket.status @@ -739,18 +704,11 @@ def mutate(cls, root, info, grievance_ticket_id, status, **kwargs): logger.error("New status is incorrect") raise GraphQLError("New status is incorrect") if status == GrievanceTicket.STATUS_CLOSED: - close_function = cls.get_close_function( - grievance_ticket.category, grievance_ticket.issue_type - ) + close_function = cls.get_close_function(grievance_ticket.category, grievance_ticket.issue_type) close_function(grievance_ticket, info) grievance_ticket.refresh_from_db() - if ( - status == GrievanceTicket.STATUS_ASSIGNED - and not grievance_ticket.assigned_to - ): - cls.has_permission( - info, Permissions.GRIEVANCE_ASSIGN, grievance_ticket.business_area - ) + if status == GrievanceTicket.STATUS_ASSIGNED and not grievance_ticket.assigned_to: + cls.has_permission(info, Permissions.GRIEVANCE_ASSIGN, grievance_ticket.business_area) grievance_ticket.assigned_to = info.context.user grievance_ticket.status = status grievance_ticket.save() @@ -815,9 +773,7 @@ def mutate(cls, root, info, note_input, **kwargs): description = note_input["description"] created_by = info.context.user - ticket_note = TicketNote.objects.create( - ticket=grievance_ticket, description=description, created_by=created_by - ) + ticket_note = TicketNote.objects.create(ticket=grievance_ticket, description=description, created_by=created_by) notification = GrievanceNotification( grievance_ticket, GrievanceNotification.ACTION_NOTES_ADDED, @@ -879,17 +835,11 @@ def mutate( ) cls.verify_approve_data(individual_approve_data) cls.verify_approve_data(flex_fields_approve_data) - individual_approve_data = { - to_snake_case(key): value for key, value in individual_approve_data.items() - } + individual_approve_data = {to_snake_case(key): value for key, value in individual_approve_data.items()} individual_data_details = grievance_ticket.individual_data_update_ticket_details individual_data = individual_data_details.individual_data - cls.verify_approve_data_against_object_data( - individual_data, individual_approve_data - ) - cls.verify_approve_data_against_object_data( - individual_data.get("flex_fields"), flex_fields_approve_data - ) + cls.verify_approve_data_against_object_data(individual_data, individual_approve_data) + cls.verify_approve_data_against_object_data(individual_data.get("flex_fields"), 
flex_fields_approve_data) documents_mapping = { "documents": approved_documents_to_create, @@ -905,14 +855,12 @@ def mutate( if field_name in documents_mapping: for index, document_data in enumerate(individual_data[field_name]): approved_documents_indexes = documents_mapping.get(field_name, []) - document_data["approve_status"] = ( - index in approved_documents_indexes - ) + document_data["approve_status"] = index in approved_documents_indexes elif field_name == "flex_fields": for flex_field_name in item.keys(): - individual_data["flex_fields"][flex_field_name][ - "approve_status" - ] = flex_fields_approve_data.get(flex_field_name) + individual_data["flex_fields"][flex_field_name]["approve_status"] = flex_fields_approve_data.get( + flex_field_name + ) elif field_to_approve: individual_data[field_name]["approve_status"] = True else: @@ -964,24 +912,18 @@ def mutate( ) cls.verify_approve_data(household_approve_data) cls.verify_approve_data(flex_fields_approve_data) - household_approve_data = { - to_snake_case(key): value for key, value in household_approve_data.items() - } + household_approve_data = {to_snake_case(key): value for key, value in household_approve_data.items()} household_data_details = grievance_ticket.household_data_update_ticket_details household_data = household_data_details.household_data - cls.verify_approve_data_against_object_data( - household_data, household_approve_data - ) - cls.verify_approve_data_against_object_data( - household_data.get("flex_fields"), flex_fields_approve_data - ) + cls.verify_approve_data_against_object_data(household_data, household_approve_data) + cls.verify_approve_data_against_object_data(household_data.get("flex_fields"), flex_fields_approve_data) for field_name, item in household_data.items(): if field_name == "flex_fields": for flex_field_name in item.keys(): - household_data["flex_fields"][flex_field_name][ - "approve_status" - ] = flex_fields_approve_data.get(flex_field_name) + household_data["flex_fields"][flex_field_name]["approve_status"] = flex_fields_approve_data.get( + flex_field_name + ) elif household_approve_data.get(field_name): household_data[field_name]["approve_status"] = True else: @@ -1056,21 +998,15 @@ class Arguments: @classmethod def verify_role_choices(cls, role): if role not in [ROLE_PRIMARY, ROLE_ALTERNATE, HEAD]: - logger.error( - "Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD" - ) - raise GraphQLError( - "Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD" - ) + logger.error("Provided role is invalid! Please provide one of those: PRIMARY, ALTERNATE, HEAD") + raise GraphQLError("Provided role is invalid! 
Please provide one of those: PRIMARY, ALTERNATE, HEAD") @classmethod def verify_if_role_exists(cls, household, current_individual, role): if role == HEAD: if household.head_of_household.id != current_individual.id: logger.error("This individual is not a head of provided household") - raise GraphQLError( - "This individual is not a head of provided household" - ) + raise GraphQLError("This individual is not a head of provided household") else: get_object_or_404( IndividualRoleInHousehold, @@ -1100,13 +1036,9 @@ def mutate( grievance_ticket = get_object_or_404(GrievanceTicket, id=grievance_ticket_id) check_concurrency_version_in_mutation(kwargs.get("version"), grievance_ticket) household = get_object_or_404(Household, id=decoded_household_id) - check_concurrency_version_in_mutation( - kwargs.get("household_version"), household - ) + check_concurrency_version_in_mutation(kwargs.get("household_version"), household) individual = get_object_or_404(Individual, id=decoded_individual_id) - check_concurrency_version_in_mutation( - kwargs.get("individual_version"), individual - ) + check_concurrency_version_in_mutation(kwargs.get("individual_version"), individual) ticket_details = grievance_ticket.ticket_details if grievance_ticket.category == GrievanceTicket.CATEGORY_NEEDS_ADJUDICATION: @@ -1169,20 +1101,14 @@ def mutate(cls, root, info, grievance_ticket_id, **kwargs): if selected_individual_id: decoded_selected_individual_id = decode_id_string(selected_individual_id) - selected_individual = get_object_or_404( - Individual, id=decoded_selected_individual_id - ) + selected_individual = get_object_or_404(Individual, id=decoded_selected_individual_id) if selected_individual not in ( ticket_details.golden_records_individual, ticket_details.possible_duplicate, ): - logger.error( - "The selected individual is not valid, must be one of those attached to the ticket" - ) - raise GraphQLError( - "The selected individual is not valid, must be one of those attached to the ticket" - ) + logger.error("The selected individual is not valid, must be one of those attached to the ticket") + raise GraphQLError("The selected individual is not valid, must be one of those attached to the ticket") ticket_details.selected_individual = selected_individual ticket_details.role_reassign_data = {} diff --git a/backend/hct_mis_api/apps/household/forms.py b/backend/hct_mis_api/apps/household/forms.py index bd058ebed8..c4c36fffa4 100644 --- a/backend/hct_mis_api/apps/household/forms.py +++ b/backend/hct_mis_api/apps/household/forms.py @@ -10,9 +10,7 @@ class UpdateByXlsxStage1Form(forms.Form): business_area = forms.ModelChoiceField(queryset=BusinessArea.objects.all()) - registration_data_import = forms.ModelChoiceField( - queryset=RegistrationDataImport.objects.all() - ) + registration_data_import = forms.ModelChoiceField(queryset=RegistrationDataImport.objects.all()) file = forms.FileField() def clean_registration_data_import(self) -> Optional[RegistrationDataImport]: @@ -34,18 +32,14 @@ def _change_rdi_has_correct_business_area(self, registration_data_import) -> Non def _retrieve_rdi_by_name(self) -> RegistrationDataImport: data = self.cleaned_data.get("registration_data_import") - registration_data_import = RegistrationDataImport.objects.filter( - name=data - ).first() + registration_data_import = RegistrationDataImport.objects.filter(name=data).first() if not registration_data_import: raise ValidationError(f"Rdi with the name {data} doesn't exist") return registration_data_import class UpdateByXlsxStage2Form(forms.Form): - 
xlsx_update_file = forms.ModelChoiceField( - queryset=XlsxUpdateFile.objects.all(), widget=forms.HiddenInput() - ) + xlsx_update_file = forms.ModelChoiceField(queryset=XlsxUpdateFile.objects.all(), widget=forms.HiddenInput()) def __init__(self, *args, **kwargs): self.xlsx_columns = kwargs.pop("xlsx_columns", []) diff --git a/backend/hct_mis_api/apps/mis_datahub/celery_tasks.py b/backend/hct_mis_api/apps/mis_datahub/celery_tasks.py index c45cd8c3a0..07705e10d4 100644 --- a/backend/hct_mis_api/apps/mis_datahub/celery_tasks.py +++ b/backend/hct_mis_api/apps/mis_datahub/celery_tasks.py @@ -14,6 +14,7 @@ def send_target_population_task(target_population_id): SendTPToDatahubTask, ) from hct_mis_api.apps.targeting.models import TargetPopulation + target_population = TargetPopulation.objects.select_related("program").get(id=target_population_id) return SendTPToDatahubTask().execute(target_population) except Exception as e: diff --git a/backend/hct_mis_api/apps/payment/utils.py b/backend/hct_mis_api/apps/payment/utils.py index 427f2cc62d..9d16be41e4 100644 --- a/backend/hct_mis_api/apps/payment/utils.py +++ b/backend/hct_mis_api/apps/payment/utils.py @@ -7,23 +7,14 @@ from hct_mis_api.apps.payment.models import PaymentRecord, PaymentVerification -def get_number_of_samples( - payment_records_sample_count, confidence_interval, margin_of_error -): +def get_number_of_samples(payment_records_sample_count, confidence_interval, margin_of_error): from statistics import NormalDist variable = 0.5 z_score = NormalDist().inv_cdf(confidence_interval + (1 - confidence_interval) / 2) - theoretical_sample = ( - (z_score ** 2) * variable * (1 - variable) / margin_of_error ** 2 - ) + theoretical_sample = (z_score**2) * variable * (1 - variable) / margin_of_error**2 actual_sample = ceil( - ( - payment_records_sample_count - * theoretical_sample - / (theoretical_sample + payment_records_sample_count) - ) - * 1.5 + (payment_records_sample_count * theoretical_sample / (theoretical_sample + payment_records_sample_count)) * 1.5 ) return min(actual_sample, payment_records_sample_count) @@ -59,31 +50,21 @@ def from_received_yes_no_to_status(received, received_amount, delivered_amount): def calculate_counts(cash_plan_verification): - cash_plan_verification.responded_count = ( - cash_plan_verification.payment_record_verifications.filter( - ~Q(status=PaymentVerification.STATUS_PENDING) - ).count() - ) - cash_plan_verification.received_count = ( - cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_RECEIVED) - ).count() - ) - cash_plan_verification.not_received_count = ( - cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_NOT_RECEIVED) - ).count() - ) - cash_plan_verification.received_with_problems_count = ( - cash_plan_verification.payment_record_verifications.filter( - Q(status=PaymentVerification.STATUS_RECEIVED_WITH_ISSUES) - ).count() - ) + cash_plan_verification.responded_count = cash_plan_verification.payment_record_verifications.filter( + ~Q(status=PaymentVerification.STATUS_PENDING) + ).count() + cash_plan_verification.received_count = cash_plan_verification.payment_record_verifications.filter( + Q(status=PaymentVerification.STATUS_RECEIVED) + ).count() + cash_plan_verification.not_received_count = cash_plan_verification.payment_record_verifications.filter( + Q(status=PaymentVerification.STATUS_NOT_RECEIVED) + ).count() + cash_plan_verification.received_with_problems_count = 
cash_plan_verification.payment_record_verifications.filter( + Q(status=PaymentVerification.STATUS_RECEIVED_WITH_ISSUES) + ).count() -def get_payment_records_for_dashboard( - year, business_area_slug, filters, only_with_delivered_quantity=False -): +def get_payment_records_for_dashboard(year, business_area_slug, filters, only_with_delivered_quantity=False): additional_filters = {} if only_with_delivered_quantity: additional_filters["delivered_quantity_usd__gt"] = 0 diff --git a/backend/hct_mis_api/apps/power_query/models.py b/backend/hct_mis_api/apps/power_query/models.py index 583fb10aee..e1d3580ef4 100644 --- a/backend/hct_mis_api/apps/power_query/models.py +++ b/backend/hct_mis_api/apps/power_query/models.py @@ -31,9 +31,7 @@ class Query(models.Model): name = models.CharField(max_length=255, blank=True, null=True, unique=True) description = models.TextField(blank=True, null=True) - owner = models.ForeignKey( - User, on_delete=models.CASCADE, related_name="power_queries" - ) + owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name="power_queries") target = models.ForeignKey(ContentType, on_delete=models.CASCADE, default="") code = models.TextField(default="qs=conn.all()", blank=True) info = JSONField(default=dict, blank=True) @@ -48,9 +46,7 @@ class Meta: verbose_name_plural = "Power Queries" ordering = ("name",) - def save( - self, force_insert=False, force_update=False, using=None, update_fields=None - ): + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.code: self.code = "qs=conn.all().order_by('id')" self.error = None @@ -80,9 +76,7 @@ def execute(self, persist=False, query_args=None): _error = None try: locals_ = dict() - locals_["conn"] = model._default_manager.using( - settings.POWER_QUERY_DB_ALIAS - ) + locals_["conn"] = model._default_manager.using(settings.POWER_QUERY_DB_ALIAS) locals_["query"] = self locals_["query_filters"] = filters locals_["invoke"] = self._invoke @@ -129,9 +123,7 @@ def data(self): class Formatter(models.Model): name = models.CharField(max_length=255, blank=True, null=True, unique=True) - content_type = models.CharField( - max_length=5, choices=list(map(list, mimetype_map.items())) - ) + content_type = models.CharField(max_length=5, choices=list(map(list, mimetype_map.items()))) code = models.TextField(blank=True, null=True) def __str__(self): @@ -159,9 +151,7 @@ class Report(models.Model): query = models.ForeignKey(Query, on_delete=models.CASCADE) formatter = models.ForeignKey(Formatter, on_delete=models.CASCADE) refresh = models.BooleanField(default=False) - owner = models.ForeignKey( - User, blank=True, null=True, on_delete=models.CASCADE, related_name="+" - ) + owner = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE, related_name="+") available_to = models.ManyToManyField(User, blank=True, related_name="+") query_args = JSONField(default=dict, blank=True) diff --git a/backend/hct_mis_api/apps/registration_datahub/celery_tasks.py b/backend/hct_mis_api/apps/registration_datahub/celery_tasks.py index 3f023887fb..64c7ef522f 100644 --- a/backend/hct_mis_api/apps/registration_datahub/celery_tasks.py +++ b/backend/hct_mis_api/apps/registration_datahub/celery_tasks.py @@ -212,6 +212,7 @@ def pull_kobo_submissions_task(import_data_id): finally: logger.info("pull_kobo_submissions_task end") + @app.task def validate_xlsx_import_task(import_data_id): logger.info("validate_xlsx_import_task start") @@ -231,4 +232,4 @@ def validate_xlsx_import_task(import_data_id): 
).update(status=ImportData.STATUS_ERROR, error=str(e)) raise finally: - logger.info("validate_xlsx_import_task end") \ No newline at end of file + logger.info("validate_xlsx_import_task end") diff --git a/backend/hct_mis_api/apps/registration_datahub/mutations.py b/backend/hct_mis_api/apps/registration_datahub/mutations.py index 819c2af4e7..2c6c4ef549 100644 --- a/backend/hct_mis_api/apps/registration_datahub/mutations.py +++ b/backend/hct_mis_api/apps/registration_datahub/mutations.py @@ -32,7 +32,8 @@ rdi_deduplication_task, registration_kobo_import_task, registration_xlsx_import_task, - pull_kobo_submissions_task, validate_xlsx_import_task, + pull_kobo_submissions_task, + validate_xlsx_import_task, ) from hct_mis_api.apps.registration_datahub.models import ( ImportData, diff --git a/backend/hct_mis_api/apps/registration_datahub/template_generator.py b/backend/hct_mis_api/apps/registration_datahub/template_generator.py index dd5dc7cadd..79cf925d33 100644 --- a/backend/hct_mis_api/apps/registration_datahub/template_generator.py +++ b/backend/hct_mis_api/apps/registration_datahub/template_generator.py @@ -23,9 +23,7 @@ def _create_workbook(cls) -> openpyxl.Workbook: @classmethod def _handle_choices(cls, fields: dict) -> list[list[str]]: - rows: list[list[str]] = [ - ["Field Name", "Label", "Value to be used in template"] - ] + rows: list[list[str]] = [["Field Name", "Label", "Value to be used in template"]] for field_name, field_value in fields.items(): is_admin_level = field_name in ("admin1_h_c", "admin2_h_c") diff --git a/backend/hct_mis_api/apps/registration_datahub/tests/test_kobo_validators_methods.py b/backend/hct_mis_api/apps/registration_datahub/tests/test_kobo_validators_methods.py index 62c653f9ff..25197e7fec 100644 --- a/backend/hct_mis_api/apps/registration_datahub/tests/test_kobo_validators_methods.py +++ b/backend/hct_mis_api/apps/registration_datahub/tests/test_kobo_validators_methods.py @@ -617,6 +617,3 @@ def test_validate_everything(self): {"header": "size_h_c", "message": "Missing household required field size_h_c"}, ] self.assertEqual(result, expected) - - - diff --git a/backend/hct_mis_api/apps/reporting/models.py b/backend/hct_mis_api/apps/reporting/models.py index 949ddbba34..ce95febcc7 100644 --- a/backend/hct_mis_api/apps/reporting/models.py +++ b/backend/hct_mis_api/apps/reporting/models.py @@ -36,13 +36,9 @@ class Report(TimeStampedUUIDModel): (INDIVIDUALS_AND_PAYMENT, _("Individuals & Payment")), ) - business_area = models.ForeignKey( - "core.BusinessArea", related_name="reports", on_delete=models.CASCADE - ) + business_area = models.ForeignKey("core.BusinessArea", related_name="reports", on_delete=models.CASCADE) file = models.FileField(blank=True, null=True) - created_by = models.ForeignKey( - "account.User", related_name="reports", on_delete=models.CASCADE - ) + created_by = models.ForeignKey("account.User", related_name="reports", on_delete=models.CASCADE) status = models.IntegerField(choices=STATUSES, default=IN_PROGRESS) report_type = models.IntegerField(choices=REPORT_TYPES) date_from = models.DateField() @@ -56,12 +52,8 @@ class Report(TimeStampedUUIDModel): null=True, related_name="reports", ) - admin_area = models.ManyToManyField( - "core.AdminArea", blank=True, related_name="reports" - ) - admin_area_new = models.ManyToManyField( - "geo.Area", blank=True, related_name="reports" - ) + admin_area = models.ManyToManyField("core.AdminArea", blank=True, related_name="reports") + admin_area_new = models.ManyToManyField("geo.Area", blank=True, 
related_name="reports") def __str__(self): return f"[{self.report_type}] Report for [{self.business_area}]" @@ -99,17 +91,11 @@ class DashboardReport(TimeStampedUUIDModel): (PAYMENT_VERIFICATION, _("Payment verification")), ) - business_area = models.ForeignKey( - "core.BusinessArea", related_name="dashboard_reports", on_delete=models.CASCADE - ) + business_area = models.ForeignKey("core.BusinessArea", related_name="dashboard_reports", on_delete=models.CASCADE) file = models.FileField(blank=True, null=True) - created_by = models.ForeignKey( - "account.User", related_name="dashboard_reports", on_delete=models.CASCADE - ) + created_by = models.ForeignKey("account.User", related_name="dashboard_reports", on_delete=models.CASCADE) status = models.PositiveSmallIntegerField(choices=STATUSES, default=IN_PROGRESS) - report_type = ChoiceArrayField( - models.CharField(choices=REPORT_TYPES, max_length=255) - ) + report_type = ChoiceArrayField(models.CharField(choices=REPORT_TYPES, max_length=255)) # filters year = models.PositiveSmallIntegerField(default=datetime.now().year) diff --git a/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py b/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py index 68d3a1fe0e..42c2b488a7 100644 --- a/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py +++ b/backend/hct_mis_api/apps/sanction_list/tasks/load_xml.py @@ -28,9 +28,7 @@ class LoadSanctionListXMLTask: - SANCTION_LIST_XML_URL = ( - "https://scsanctions.un.org/resources/xml/en/consolidated.xml" - ) + SANCTION_LIST_XML_URL = "https://scsanctions.un.org/resources/xml/en/consolidated.xml" INDIVIDUAL_TAG_PATH = "INDIVIDUALS/INDIVIDUAL" @@ -69,16 +67,11 @@ def _get_text_from_path(individual_tag: ET.Element, path: str) -> str: return tag.text @staticmethod - def _get_designation( - individual_tag: ET.Element, *args, **kwargs - ) -> Union[str, None]: + def _get_designation(individual_tag: ET.Element, *args, **kwargs) -> Union[str, None]: designation_tag_name = "DESIGNATION" designation_tag = individual_tag.find(designation_tag_name) if isinstance(designation_tag, ET.Element): - designations = [ - value_tag.text - for value_tag in individual_tag.find(designation_tag_name) - ] + designations = [value_tag.text for value_tag in individual_tag.find(designation_tag_name)] return " ".join(designations) return "" @@ -110,14 +103,10 @@ def _get_date_of_births( elif isinstance(note_tag, ET.Element) and note_tag.text: value = note_tag.text try: - parsed_date = dateutil.parser.parse( - value, default=default_datetime - ) + parsed_date = dateutil.parser.parse(value, default=default_datetime) dates_of_birth.add( SanctionListIndividualDateOfBirth( - individual=self._get_individual_from_db_or_file( - individual - ), + individual=self._get_individual_from_db_or_file(individual), date=parsed_date.date(), ) ) @@ -151,17 +140,10 @@ def _get_alias_names( for tag in alias_names_tags: quality_tag = tag.find("QUALITY") alias_name_tag = tag.find("ALIAS_NAME") - is_valid_quality_tag = ( - isinstance(quality_tag, ET.Element) and quality_tag.text - ) - is_valid_name_tag = ( - isinstance(alias_name_tag, ET.Element) and alias_name_tag.text - ) + is_valid_quality_tag = isinstance(quality_tag, ET.Element) and quality_tag.text + is_valid_name_tag = isinstance(alias_name_tag, ET.Element) and alias_name_tag.text if is_valid_quality_tag and is_valid_name_tag: - if ( - quality_tag.text.lower() in ("good", "a.k.a") - and alias_name_tag.text - ): + if quality_tag.text.lower() in ("good", "a.k.a") and alias_name_tag.text: aliases.add( 
SanctionListIndividualAliasName( individual=self._get_individual_from_db_or_file(individual), @@ -172,9 +154,7 @@ def _get_alias_names( return aliases @staticmethod - def _get_country_field( - individual_tag: ET.Element, path: str, *args, **kwargs - ) -> Union[str, None, set]: + def _get_country_field(individual_tag: ET.Element, path: str, *args, **kwargs) -> Union[str, None, set]: tags = individual_tag.findall(path) countries = set() @@ -270,9 +250,7 @@ def _get_documents( "note", document_tag.find("NOTE"), ) - if isinstance(document_number_tag, ET.Element) and isinstance( - type_of_document_tag, ET.Element - ): + if isinstance(document_number_tag, ET.Element) and isinstance(type_of_document_tag, ET.Element): document = SanctionListIndividualDocument( individual=self._get_individual_from_db_or_file(individual), type_of_document=type_of_document_tag.text, @@ -301,14 +279,9 @@ def _get_individual_data(self, individual_tag: ET.Element) -> dict: value = path_or_func(individual_tag, individual) else: raw_value = self._get_text_from_path(individual_tag, path_or_func) - value = self._cast_field_value_to_correct_type( - SanctionListIndividual, field_name, raw_value - ) + value = self._cast_field_value_to_correct_type(SanctionListIndividual, field_name, raw_value) - if ( - hasattr(individual, field_name) - and field_name not in individual_data_dict.keys() - ): + if hasattr(individual, field_name) and field_name not in individual_data_dict.keys(): setattr(individual, field_name, value) elif field_name in individual_data_dict.keys(): individual_data_dict[field_name] = value @@ -330,20 +303,14 @@ def _get_individual_fields(self) -> list[str]: # "country_of_birth", } all_fields = SanctionListIndividual._meta.get_fields(include_parents=False) - return [ - field.name - for field in all_fields - if field.name not in excluded_fields and field.concrete is True - ] + return [field.name for field in all_fields if field.name not in excluded_fields and field.concrete is True] @staticmethod def _get_individual_from_db_or_file( individual: SanctionListIndividual, ) -> SanctionListIndividual: try: - return SanctionListIndividual.all_objects.get( - reference_number=individual.reference_number - ) + return SanctionListIndividual.all_objects.get(reference_number=individual.reference_number) except ObjectDoesNotExist: return individual @@ -351,63 +318,45 @@ def _get_individual_from_db_or_file( def _get_all_individuals_from_db(self) -> QuerySet: return SanctionListIndividual.all_objects.defer("documents") - def _get_existing_individuals( - self, individuals_reference_numbers: set[str] - ) -> QuerySet: - return self._get_all_individuals_from_db.filter( - reference_number__in=individuals_reference_numbers - ) + def _get_existing_individuals(self, individuals_reference_numbers: set[str]) -> QuerySet: + return self._get_all_individuals_from_db.filter(reference_number__in=individuals_reference_numbers) def _get_individuals_to_create( self, individuals_from_file: Iterable[SanctionListIndividual] ) -> set[SanctionListIndividual]: - individuals_reference_numbers = self._get_reference_numbers_list( - individuals_from_file - ) + individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) return { individual for individual in individuals_from_file if individual.reference_number - not in self._get_existing_individuals( - individuals_reference_numbers - ).values_list("reference_number", flat=True) + not in self._get_existing_individuals(individuals_reference_numbers).values_list( + "reference_number", 
flat=True + ) } def _get_individuals_to_update( self, individuals_from_file: Iterable[SanctionListIndividual] ) -> set[SanctionListIndividual]: individuals_to_update = set() - individuals_reference_numbers = self._get_reference_numbers_list( - individuals_from_file - ) + individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) for individual in individuals_from_file: - new_individual_data_dict = model_to_dict( - individual, fields=self._get_individual_fields - ) + new_individual_data_dict = model_to_dict(individual, fields=self._get_individual_fields) old_individual = ( self._get_existing_individuals(individuals_reference_numbers) .filter(reference_number=new_individual_data_dict["reference_number"]) .first() ) if old_individual: - old_individual_data_dict = model_to_dict( - old_individual, fields=self._get_individual_fields - ) + old_individual_data_dict = model_to_dict(old_individual, fields=self._get_individual_fields) if new_individual_data_dict != old_individual_data_dict: - obj = SanctionListIndividual.all_objects.get( - reference_number=individual.reference_number - ) + obj = SanctionListIndividual.all_objects.get(reference_number=individual.reference_number) individual.id = obj.id individuals_to_update.add(individual) return individuals_to_update - def _get_individuals_to_deactivate( - self, individuals_from_file: Iterable[SanctionListIndividual] - ) -> list[str]: - individuals_reference_numbers = self._get_reference_numbers_list( - individuals_from_file - ) + def _get_individuals_to_deactivate(self, individuals_from_file: Iterable[SanctionListIndividual]) -> list[str]: + individuals_reference_numbers = self._get_reference_numbers_list(individuals_from_file) ids = self._get_all_individuals_from_db.difference( self._get_existing_individuals(individuals_reference_numbers) ).values_list("id", flat=True) @@ -505,9 +454,7 @@ def execute(self): self._get_individual_fields, 1000, ) - individuals_ids_to_delete = self._get_individuals_to_deactivate( - individuals_from_file - ) + individuals_ids_to_delete = self._get_individuals_to_deactivate(individuals_from_file) SanctionListIndividual.objects.filter(id__in=individuals_ids_to_delete).delete() # SanctionListIndividualDocument @@ -525,9 +472,7 @@ def execute(self): note=single_doc.note, ) if created is True: - individuals_to_check_against_sanction_list.append( - doc_obj.individual - ) + individuals_to_check_against_sanction_list.append(doc_obj.individual) # SanctionListIndividualCountries SanctionListIndividualCountries.objects.all().delete() @@ -538,9 +483,7 @@ def execute(self): SanctionListIndividualNationalities.objects.all().delete() if nationalities_from_file: - SanctionListIndividualNationalities.objects.bulk_create( - nationalities_from_file - ) + SanctionListIndividualNationalities.objects.bulk_create(nationalities_from_file) # SanctionListIndividualAliasName SanctionListIndividualAliasName.objects.all().delete() @@ -550,22 +493,15 @@ def execute(self): # SanctionListIndividualDateOfBirth if dob_from_file: for single_dob in dob_from_file: - ( - dob_obj, - created, - ) = SanctionListIndividualDateOfBirth.objects.get_or_create( + (dob_obj, created,) = SanctionListIndividualDateOfBirth.objects.get_or_create( individual=single_dob.individual, date=single_dob.date, ) if created is True: - individuals_to_check_against_sanction_list.append( - dob_obj.individual - ) + individuals_to_check_against_sanction_list.append(dob_obj.individual) individuals_to_check_against_sanction_list.extend(individuals_to_create) 
individuals_to_check_against_sanction_list.extend(individuals_to_update) if individuals_to_check_against_sanction_list: - CheckAgainstSanctionListPreMergeTask.execute( - individuals_to_check_against_sanction_list - ) + CheckAgainstSanctionListPreMergeTask.execute(individuals_to_check_against_sanction_list) diff --git a/backend/hct_mis_api/apps/steficon/forms.py b/backend/hct_mis_api/apps/steficon/forms.py index daabd216af..9d2a0b0b8e 100644 --- a/backend/hct_mis_api/apps/steficon/forms.py +++ b/backend/hct_mis_api/apps/steficon/forms.py @@ -28,7 +28,6 @@ def format_code(code): return black.format_file_contents(code, fast=False, mode=mode) - except ImportError as ex: if config.USE_BLACK: logger.warning(f"Steficon is configured to use Black, but was unable to import it: {ex}") diff --git a/backend/hct_mis_api/apps/steficon/models.py b/backend/hct_mis_api/apps/steficon/models.py index bae9f0bfb3..939eb3ba85 100644 --- a/backend/hct_mis_api/apps/steficon/models.py +++ b/backend/hct_mis_api/apps/steficon/models.py @@ -40,9 +40,7 @@ class Rule(models.Model): description = models.TextField(blank=True, null=True) enabled = models.BooleanField(default=False) deprecated = models.BooleanField(default=False) - language = models.CharField( - max_length=10, default=LANGUAGES[0][0], choices=LANGUAGES - ) + language = models.CharField(max_length=10, default=LANGUAGES[0][0], choices=LANGUAGES) security = models.IntegerField( choices=( (SAFETY_NONE, "Low"), @@ -51,12 +49,8 @@ class Rule(models.Model): ), default=SAFETY_STANDARD, ) - created_by = models.ForeignKey( - settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT - ) - updated_by = models.ForeignKey( - settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT - ) + created_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT) + updated_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT) created_at = models.DateTimeField(auto_now_add=True, db_index=True) updated_at = models.DateTimeField(auto_now=True, db_index=True) @@ -100,9 +94,7 @@ def get_changes(self): diff = set(data1.items()).symmetric_difference(data2.items()) return data1, list(dict(diff).keys()) - def save( - self, force_insert=False, force_update=False, using=None, update_fields=None - ): + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if "individual_data_needed" not in self.flags: self.flags["individual_data_needed"] = False with atomic(): @@ -122,13 +114,9 @@ def commit(self, is_release=False, force=False): "affected_fields": changes, } if changes: - release = RuleCommit.objects.create( - rule=self, version=self.version, **values - ) + release = RuleCommit.objects.create(rule=self, version=self.version, **values) elif force: - release, __ = RuleCommit.objects.update_or_create( - rule=self, version=self.version, defaults=values - ) + release, __ = RuleCommit.objects.update_or_create(rule=self, version=self.version, defaults=values) if is_release: self.history.exclude(pk=release.pk).update(deprecated=True) return release @@ -196,19 +184,13 @@ class RuleCommit(models.Model): timestamp = models.DateTimeField(auto_now=True) version = models.IntegerField() - rule = models.ForeignKey( - Rule, null=True, related_name="history", on_delete=models.SET_NULL - ) - updated_by = models.ForeignKey( - settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT - ) + rule = models.ForeignKey(Rule, 
null=True, related_name="history", on_delete=models.SET_NULL) + updated_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, on_delete=models.PROTECT) definition = models.TextField(blank=True, default="result.value=0") is_release = models.BooleanField(default=False) enabled = models.BooleanField(default=False) deprecated = models.BooleanField(default=False) - language = models.CharField( - max_length=10, default=Rule.LANGUAGES[0][0], choices=Rule.LANGUAGES - ) + language = models.CharField(max_length=10, default=Rule.LANGUAGES[0][0], choices=Rule.LANGUAGES) affected_fields = ArrayField(models.CharField(max_length=100)) before = JSONField(help_text="The record before change", editable=False) diff --git a/backend/hct_mis_api/apps/targeting/models.py b/backend/hct_mis_api/apps/targeting/models.py index e5ac4873cd..f8be6ccee1 100644 --- a/backend/hct_mis_api/apps/targeting/models.py +++ b/backend/hct_mis_api/apps/targeting/models.py @@ -193,12 +193,8 @@ class TargetPopulation(SoftDeletableModel, TimeStampedUUIDModel, ConcurrencyMode null=True, blank=True, ) - business_area = models.ForeignKey( - "core.BusinessArea", null=True, on_delete=models.CASCADE - ) - status = models.CharField( - max_length=_MAX_LEN, choices=STATUS_CHOICES, default=STATUS_DRAFT, db_index=True - ) + business_area = models.ForeignKey("core.BusinessArea", null=True, on_delete=models.CASCADE) + status = models.CharField(max_length=_MAX_LEN, choices=STATUS_CHOICES, default=STATUS_DRAFT, db_index=True) households = models.ManyToManyField( "household.Household", related_name="target_populations", @@ -282,22 +278,16 @@ class TargetPopulation(SoftDeletableModel, TimeStampedUUIDModel, ConcurrencyMode @property def excluded_household_ids(self): - excluded_household_ids_array = map_unicef_ids_to_households_unicef_ids( - self.excluded_ids - ) + excluded_household_ids_array = map_unicef_ids_to_households_unicef_ids(self.excluded_ids) return excluded_household_ids_array @property def vulnerability_score_filtered_households(self): queryset = self.households if self.vulnerability_score_max is not None: - queryset = queryset.filter( - selections__vulnerability_score__lte=self.vulnerability_score_max - ) + queryset = queryset.filter(selections__vulnerability_score__lte=self.vulnerability_score_max) if self.vulnerability_score_min is not None: - queryset = queryset.filter( - selections__vulnerability_score__gte=self.vulnerability_score_min - ) + queryset = queryset.filter(selections__vulnerability_score__gte=self.vulnerability_score_min) queryset = queryset.filter(~Q(unicef_id__in=self.excluded_household_ids)) return queryset.distinct() @@ -306,9 +296,9 @@ def vulnerability_score_filtered_households(self): def candidate_list(self): if self.status != TargetPopulation.STATUS_DRAFT: return [] - return Household.objects.filter( - self.candidate_list_targeting_criteria.get_query() - ).filter(business_area=self.business_area) + return Household.objects.filter(self.candidate_list_targeting_criteria.get_query()).filter( + business_area=self.business_area + ) @property def final_list(self): @@ -325,26 +315,14 @@ def candidate_stats(self): if self.status == TargetPopulation.STATUS_DRAFT: households_ids = self.candidate_list.values_list("id") else: - households_ids = self.vulnerability_score_filtered_households.values_list( - "id" - ) + households_ids = self.vulnerability_score_filtered_households.values_list("id") delta18 = relativedelta(years=+18) date18ago = datetime.datetime.now() - delta18 - targeted_individuals = 
Individual.objects.filter( - household__id__in=households_ids - ).aggregate( - child_male=Count( - "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE) - ), - child_female=Count( - "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE) - ), - adult_male=Count( - "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE) - ), - adult_female=Count( - "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE) - ), + targeted_individuals = Individual.objects.filter(household__id__in=households_ids).aggregate( + child_male=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE)), + child_female=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE)), + adult_male=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE)), + adult_female=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE)), ) return { "child_male": targeted_individuals.get("child_male"), @@ -374,9 +352,7 @@ def final_stats(self): return None elif self.status == TargetPopulation.STATUS_LOCKED: households_ids = ( - self.vulnerability_score_filtered_households.filter( - self.final_list_targeting_criteria.get_query() - ) + self.vulnerability_score_filtered_households.filter(self.final_list_targeting_criteria.get_query()) .filter(business_area=self.business_area) .values_list("id") .distinct() @@ -386,21 +362,11 @@ def final_stats(self): delta18 = relativedelta(years=+18) date18ago = datetime.datetime.now() - delta18 - targeted_individuals = Individual.objects.filter( - household__id__in=households_ids - ).aggregate( - child_male=Count( - "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE) - ), - child_female=Count( - "id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE) - ), - adult_male=Count( - "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE) - ), - adult_female=Count( - "id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE) - ), + targeted_individuals = Individual.objects.filter(household__id__in=households_ids).aggregate( + child_male=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=MALE)), + child_female=Count("id", distinct=True, filter=Q(birth_date__gt=date18ago, sex=FEMALE)), + adult_male=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=MALE)), + adult_female=Count("id", distinct=True, filter=Q(birth_date__lte=date18ago, sex=FEMALE)), ) return { @@ -467,9 +433,7 @@ class HouseholdSelection(TimeStampedUUIDModel): on_delete=models.CASCADE, related_name="selections", ) - target_population = models.ForeignKey( - "TargetPopulation", on_delete=models.CASCADE, related_name="selections" - ) + target_population = models.ForeignKey("TargetPopulation", on_delete=models.CASCADE, related_name="selections") vulnerability_score = models.DecimalField( blank=True, null=True, @@ -570,16 +534,12 @@ def get_criteria_string(self): if isinstance(self.individuals_filters_blocks, list) else self.individuals_filters_blocks.all() ) - individuals_filters_blocks_strings = [ - x.get_criteria_string() for x in individuals_filters_blocks - ] + individuals_filters_blocks_strings = [x.get_criteria_string() for x in individuals_filters_blocks] all_strings = [] if len(filters_strings): all_strings.append(f"H({' AND '.join(filters_strings).strip()})") if len(individuals_filters_blocks_strings): - all_strings.append( - f"I({' AND '.join(individuals_filters_blocks_strings).strip()})" - ) + all_strings.append(f"I({' AND 
'.join(individuals_filters_blocks_strings).strip()})") return " AND ".join(all_strings).strip() def get_query(self): @@ -646,9 +606,7 @@ def get_query(self): if self.target_only_hoh: # only filtering against heads of household individuals_query &= Q(heading_household__isnull=False) - households_id = Individual.objects.filter(individuals_query).values_list( - "household_id", flat=True - ) + households_id = Individual.objects.filter(individuals_query).values_list("household_id", flat=True) return Q(id__in=households_id) @@ -751,25 +709,17 @@ def get_query_for_lookup( field_attr, ): select_many = get_attr_value("type", field_attr, None) == TYPE_SELECT_MANY - comparision_attribute = TargetingCriteriaRuleFilter.COMPARISION_ATTRIBUTES.get( - self.comparision_method - ) + comparision_attribute = TargetingCriteriaRuleFilter.COMPARISION_ATTRIBUTES.get(self.comparision_method) args_count = comparision_attribute.get("arguments") if self.arguments is None: - logger.error( - f"{self.field_name} {self.comparision_method} filter query expect {args_count} " - f"arguments" - ) + logger.error(f"{self.field_name} {self.comparision_method} filter query expect {args_count} " f"arguments") raise ValidationError( - f"{self.field_name} {self.comparision_method} filter query expect {args_count} " - f"arguments" + f"{self.field_name} {self.comparision_method} filter query expect {args_count} " f"arguments" ) args_input_count = len(self.arguments) if select_many: if args_input_count < 1: - logger.error( - f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument" - ) + logger.error(f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument") raise ValidationError( f"{self.field_name} SELECT MULTIPLE CONTAINS filter query expect at least 1 argument" ) @@ -800,13 +750,9 @@ def get_query_for_lookup( def get_query_for_core_field(self): core_fields = self.get_core_fields() - core_field_attrs = [ - attr for attr in core_fields if attr.get("name") == self.field_name - ] + core_field_attrs = [attr for attr in core_fields if attr.get("name") == self.field_name] if len(core_field_attrs) != 1: - logger.error( - f"There are no Core Field Attributes associated with this fieldName {self.field_name}" - ) + logger.error(f"There are no Core Field Attributes associated with this fieldName {self.field_name}") raise ValidationError( f"There are no Core Field Attributes associated with this fieldName {self.field_name}" ) @@ -830,15 +776,11 @@ def get_query_for_core_field(self): def get_query_for_flex_field(self): flex_field_attr = FlexibleAttribute.objects.get(name=self.field_name) if not flex_field_attr: - logger.error( - f"There are no Flex Field Attributes associated with this fieldName {self.field_name}" - ) + logger.error(f"There are no Flex Field Attributes associated with this fieldName {self.field_name}") raise ValidationError( f"There are no Flex Field Attributes associated with this fieldName {self.field_name}" ) - lookup_prefix = self.get_lookup_prefix( - _INDIVIDUAL if flex_field_attr.associated_with == 1 else _HOUSEHOLD - ) + lookup_prefix = self.get_lookup_prefix(_INDIVIDUAL if flex_field_attr.associated_with == 1 else _HOUSEHOLD) lookup = f"{lookup_prefix}flex_fields__{flex_field_attr.name}" return self.get_query_for_lookup(lookup, flex_field_attr) @@ -882,9 +824,7 @@ def get_core_fields(self): ) -class TargetingIndividualBlockRuleFilter( - TimeStampedUUIDModel, TargetingCriteriaFilterMixin -): +class TargetingIndividualBlockRuleFilter(TimeStampedUUIDModel, 
TargetingCriteriaFilterMixin): """ This is one explicit filter like: :Age <> 10-20 diff --git a/backend/hct_mis_api/apps/targeting/steficon.py b/backend/hct_mis_api/apps/targeting/steficon.py index 6cfb42ccd9..b1bfa4c42a 100644 --- a/backend/hct_mis_api/apps/targeting/steficon.py +++ b/backend/hct_mis_api/apps/targeting/steficon.py @@ -90,7 +90,6 @@ def test_steficon(self, request, pk): context["form"] = form return TemplateResponse(request, "admin/targeting/targetpopulation/steficon_test.html", context) - except ImportError: class SteficonExecutorMixin: diff --git a/backend/hct_mis_api/apps/utils/admin.py b/backend/hct_mis_api/apps/utils/admin.py index 2e57172be7..4e721411a2 100644 --- a/backend/hct_mis_api/apps/utils/admin.py +++ b/backend/hct_mis_api/apps/utils/admin.py @@ -65,9 +65,7 @@ def reset_sync_date_single(self, request, pk): ) -class HOPEModelAdminBase( - SmartDisplayAllMixin, AdminActionPermMixin, JSONWidgetMixin, admin.ModelAdmin -): +class HOPEModelAdminBase(SmartDisplayAllMixin, AdminActionPermMixin, JSONWidgetMixin, admin.ModelAdmin): list_per_page = 50 def get_fields(self, request, obj=None): From b40ccc5d454ddcdf9589302b769fd53b06f29fc2 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Wed, 16 Feb 2022 22:09:11 +0000 Subject: [PATCH 18/24] Removed tox --- .../apps/reporting/generate_report_service.py | 2 +- backend/poetry.lock | 21 +++++++-------- backend/pyproject.toml | 4 +-- backend/tox.ini | 27 ------------------- 4 files changed, 13 insertions(+), 41 deletions(-) delete mode 100644 backend/tox.ini diff --git a/backend/hct_mis_api/apps/reporting/generate_report_service.py b/backend/hct_mis_api/apps/reporting/generate_report_service.py index 1c11138395..d2bb1afc46 100644 --- a/backend/hct_mis_api/apps/reporting/generate_report_service.py +++ b/backend/hct_mis_api/apps/reporting/generate_report_service.py @@ -7,7 +7,7 @@ from django.contrib.postgres.aggregates.general import ArrayAgg from django.core.files import File from django.core.mail import EmailMultiAlternatives -from django.db.models import Count, Max, Min, Q, Sum +from django.db.models import Count, DecimalField, Max, Min, Q, Sum from django.template.loader import render_to_string import openpyxl diff --git a/backend/poetry.lock b/backend/poetry.lock index 3cf4c57ec1..8e0657eeb2 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -896,7 +896,7 @@ wasmer = {version = ">=0.3.0", markers = "python_version >= \"3.5\" and platform [[package]] name = "filelock" -version = "3.5.0" +version = "3.5.1" description = "A platform independent file lock." 
category = "dev" optional = false @@ -960,7 +960,7 @@ test = ["pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "fastdiff (== [[package]] name = "graphene-django" -version = "2.15.0" +version = "2.14.0" description = "Graphene Django integration" category = "main" optional = false @@ -982,7 +982,7 @@ test = ["pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-d [[package]] name = "graphene-file-upload" -version = "1.3.0" +version = "1.2.2" description = "Lib for adding file upload functionality to GraphQL mutations in Graphene Django and Flask-Graphql" category = "main" optional = false @@ -995,7 +995,6 @@ six = ">=1.11.0" all = ["Flask (>=1.0.2)", "graphene (>=2.1.2)", "Flask-Graphql (>=2.0.0)", "graphene-django (>=2.0.0)"] django = ["graphene-django (>=2.0.0)"] flask = ["Flask (>=1.0.2)", "graphene (>=2.1.2)", "Flask-Graphql (>=2.0.0)"] -tests = ["coverage", "pytest", "pytest-cov", "pytest-django"] [[package]] name = "graphql-core" @@ -2062,7 +2061,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.1" -content-hash = "be635f8705002a41e4ea0a844a4bd29e1cac30bc5dd157fc1d265a2d9f86936c" +content-hash = "5a4378473fbad779a44ad17fe287af8fb542584c569ffeefd4894e2ce2366207" [metadata.files] amqp = [ @@ -2446,8 +2445,8 @@ fastdiff = [ {file = "fastdiff-0.2.0.tar.gz", hash = "sha256:623ad3d9055ab78e014d0d10767cb033d98d5d4f66052abf498350c8e42e29aa"}, ] filelock = [ - {file = "filelock-3.5.0-py3-none-any.whl", hash = "sha256:a7141afb4feca60925cfc090b411fb9faaf542d06d58ece4f93d940265e6b995"}, - {file = "filelock-3.5.0.tar.gz", hash = "sha256:137b661e657f7850eec9def2a001efadba3414be523b87cd3f9a037372d80a15"}, + {file = "filelock-3.5.1-py3-none-any.whl", hash = "sha256:7b23620a293cf3e19924e469cb96672dc72b36c26e8f80f85668310117fcbe4e"}, + {file = "filelock-3.5.1.tar.gz", hash = "sha256:d1eccb164ed020bc84edd9e45bf6cdb177f64749f6b8fe066648832d2e98726d"}, ] freezegun = [ {file = "freezegun-0.3.14-py2.py3-none-any.whl", hash = "sha256:10336fc80a235847c64033f9727f3847f37db4bd549be1d9f3b5ae0279256c69"}, @@ -2484,12 +2483,12 @@ graphene = [ {file = "graphene-2.1.9.tar.gz", hash = "sha256:b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93"}, ] graphene-django = [ - {file = "graphene-django-2.15.0.tar.gz", hash = "sha256:b78c9b05bc899016b9cc5bf13faa1f37fe1faa8c5407552c6ddd1a28f46fc31a"}, - {file = "graphene_django-2.15.0-py2.py3-none-any.whl", hash = "sha256:02671d195f0c09c8649acff2a8f4ad4f297d0f7d98ea6e6cdf034b81bab92880"}, + {file = "graphene-django-2.14.0.tar.gz", hash = "sha256:37b399a983e6e70d26696d0052b416a030d14f6d8a591d67711916e1fea95861"}, + {file = "graphene_django-2.14.0-py2.py3-none-any.whl", hash = "sha256:68d5c52f775dea6f5e0578b2c09c0a7cddeb48a957a0000b289d6e6145d6a86d"}, ] graphene-file-upload = [ - {file = "graphene_file_upload-1.3.0-py3-none-any.whl", hash = "sha256:5afe50f409f50e3d198fd92c883d98d868e6c6aaadf5df3a3f4d88ecad90ed97"}, - {file = "graphene_file_upload-1.3.0.tar.gz", hash = "sha256:6898480b0556826472c80971032917c01968ade5800d84054008fe598795b063"}, + {file = "graphene_file_upload-1.2.2-py3-none-any.whl", hash = "sha256:034ff72d2834b7aebd06fda412fa88a10aba34bac604317f7552457e47040654"}, + {file = "graphene_file_upload-1.2.2.tar.gz", hash = "sha256:b5f47a81f5614d1184ba9480a63835769b26303cb7759316122401784858e9c7"}, ] graphql-core = [ {file = "graphql-core-2.3.2.tar.gz", hash = "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746"}, diff --git 
a/backend/pyproject.toml b/backend/pyproject.toml index b03112fa09..0c5e118804 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -91,8 +91,8 @@ django_compressor = ">=2.4" elasticsearch ="<7.14" elasticsearch-dsl ="^7.0.0" gevent = "20.9.0" -graphene-django = "^2.7.1" -graphene-file-upload = "^1.2.2" +graphene-django = "2.14.0" +graphene-file-upload = "1.2.2" greenlet = "0.4.17" gunicorn = "20.0.4" jedi = "0.17.2" diff --git a/backend/tox.ini b/backend/tox.ini deleted file mode 100644 index 1d175fac89..0000000000 --- a/backend/tox.ini +++ /dev/null @@ -1,27 +0,0 @@ -[tox] -envlist = py39-django{32,40} -isolated_build = true - -[testenv] -passenv = * -deps = - django32: Django==3.2 - django40: Django==4.0 - poetry -#skipsdist = True - -setenv = - DJANGO_SETTINGS_MODULE=hct_mis_api.settings.test - #PYTHONPATH={toxinidir}{:}{toxinidir}/.. - -commands = - poetry install - poetry run ./manage.py test --settings hct_mis_api.settings.test - - -[tox:.package] -whitelist_externals = poetry -# note tox will use the same python version as under what tox is installed to package -# so unless this is python 3 you can require a given python version for the packaging -# environment via the basepython key -basepython = python3.9 \ No newline at end of file From 978772a92332db1bbed8eefacf0cdb08810b3e48 Mon Sep 17 00:00:00 2001 From: Jan Romaniak <jan.romaniak@tivix.com> Date: Thu, 17 Feb 2022 11:10:59 +0100 Subject: [PATCH 19/24] fix some filters --- backend/hct_mis_api/apps/core/schema.py | 8 +++----- backend/hct_mis_api/apps/registration_data/schema.py | 6 +++++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 09bf6c9787..10bead2bcd 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -44,11 +44,9 @@ class AdminAreaFilter(FilterSet): class Meta: model = AdminArea - fields = [ - "title", - # "business_area": ["exact"], - ] - + fields = { + "title": ["exact", "istartswith"], + } class ChoiceObject(graphene.ObjectType): name = String() diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py index a009e7b857..cfca7abd9a 100644 --- a/backend/hct_mis_api/apps/registration_data/schema.py +++ b/backend/hct_mis_api/apps/registration_data/schema.py @@ -29,7 +29,11 @@ class RegistrationDataImportFilter(FilterSet): class Meta: model = RegistrationDataImport - fields = ["imported_by__id", "import_date", "status", "name", "business_area"] + fields = { + "imported_by__id": ["exact"], + "status": ["exact"], + "name": ["exact", "startswith"], + } order_by = CustomOrderingFilter( fields=( From adcbb4a17abc594b20dba610c75972f01b68895e Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Thu, 17 Feb 2022 17:03:12 +0000 Subject: [PATCH 20/24] Downgraded django-filter and re-added extra lookups --- backend/hct_mis_api/apps/account/schema.py | 12 +- backend/hct_mis_api/apps/grievance/schema.py | 18 +- backend/hct_mis_api/apps/household/schema.py | 44 ++--- backend/hct_mis_api/apps/program/schema.py | 34 +--- .../apps/registration_data/schema.py | 2 + .../hct_mis_api/apps/sanction_list/schema.py | 7 +- backend/poetry.lock | 182 +----------------- backend/pyproject.toml | 4 +- 8 files changed, 57 insertions(+), 246 deletions(-) diff --git a/backend/hct_mis_api/apps/account/schema.py b/backend/hct_mis_api/apps/account/schema.py index a662e35340..06cd03c75d 100644 --- 
a/backend/hct_mis_api/apps/account/schema.py +++ b/backend/hct_mis_api/apps/account/schema.py @@ -54,12 +54,12 @@ class UsersFilter(FilterSet): class Meta: model = get_user_model() - fields = [ - "search", - "status", - "partner", - "roles", - ] + fields = { + "search": ["exact", "startswith"], + "status": ["exact"], + "partner": ["exact"], + "roles": ["exact"], + } order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/grievance/schema.py b/backend/hct_mis_api/apps/grievance/schema.py index bde6d80e4d..ef2f582851 100644 --- a/backend/hct_mis_api/apps/grievance/schema.py +++ b/backend/hct_mis_api/apps/grievance/schema.py @@ -136,8 +136,6 @@ class GrievanceTicketFilter(FilterSet): "payment_verifications__payment_record__service_provider", ), ) - id__startswith = CharFilter(field_name="id", lookup_expr="startswith") - area__startswith = CharFilter(field_name="area", lookup_expr="startswith") business_area = CharFilter(field_name="business_area__slug", required=True) search = CharFilter(method="search_filter") status = TypedMultipleChoiceFilter(field_name="status", choices=GrievanceTicket.STATUS_CHOICES, coerce=int) @@ -155,15 +153,13 @@ class GrievanceTicketFilter(FilterSet): permissions = MultipleChoiceFilter(choices=Permissions.choices(), method="permissions_filter") class Meta: - fields = [ - "id", - "id__startswith", - "category", - "area", - "area__startswith", - "assigned_to", - "registration_data_import", - ] + fields = { + "id": ["exact", "startswith"], + "category": ["exact"], + "area": ["exact", "startswith"], + "assigned_to": ["exact"], + "registration_data_import": ["exact"], + } model = GrievanceTicket order_by = OrderingFilter( diff --git a/backend/hct_mis_api/apps/household/schema.py b/backend/hct_mis_api/apps/household/schema.py index e32a92013c..e7fa6966af 100644 --- a/backend/hct_mis_api/apps/household/schema.py +++ b/backend/hct_mis_api/apps/household/schema.py @@ -115,18 +115,18 @@ class HouseholdFilter(FilterSet): class Meta: model = Household - fields = [ - "business_area", - "country_origin", - "address", - "head_of_household__full_name", - "size", - "admin_area", - "target_populations", - "programs", - "residence_status", - "withdrawn", - ] + fields = { + "business_area": ["exact"], + "country_origin": ["exact", "startswith"], + "address": ["exact", "startswith"], + "head_of_household__full_name": ["exact", "startswith"], + "size": ["range", "lte", "gte"], + "admin_area": ["exact"], + "target_populations": ["exact"], + "programs": ["exact"], + "residence_status": ["exact"], + "withdrawn": ["exact"], + } order_by = CustomOrderingFilter( fields=( @@ -182,16 +182,16 @@ class IndividualFilter(FilterSet): class Meta: model = Individual - fields = [ - "household__id", - "programs", - "business_area", - "full_name", - "age", - "sex", - "household__admin_area", - "withdrawn", - ] + fields = { + "household__id": ["exact"], + "programs": ["exact"], + "business_area": ["exact"], + "full_name": ["exact", "startswith", "endswith"], + "age": ["range", "lte", "gte"], + "sex": ["exact"], + "household__admin_area": ["exact"], + "withdrawn": ["exact"], + } order_by = CustomOrderingFilter( fields=( diff --git a/backend/hct_mis_api/apps/program/schema.py b/backend/hct_mis_api/apps/program/schema.py index 48203a0f8c..9601d6cf46 100644 --- a/backend/hct_mis_api/apps/program/schema.py +++ b/backend/hct_mis_api/apps/program/schema.py @@ -126,33 +126,19 @@ class CashPlanFilter(FilterSet): verification_status = MultipleChoiceFilter( field_name="verification_status", 
choices=CashPlanPaymentVerification.STATUS_CHOICES
     )
-    assistance_through__startswith = CharFilter(field_name="assistance_through", lookup_expr="startswith")
-    service_provider__full_name__startswith = CharFilter(
-        field_name="service_provider__full_name", lookup_expr="startswith"
+    business_area = CharFilter(
+        field_name="business_area__slug",
     )
-    end_date__lte = DateFilter(field_name="end_date", lookup_expr="lte")
-    end_date__gte = DateFilter(field_name="end_date", lookup_expr="gte")
-    start_date__lte = DateFilter(field_name="start_date", lookup_expr="lte")
-    start_date__gte = DateFilter(field_name="start_date", lookup_expr="gte")
-    business_area__slug__startswith = CharFilter(field_name="business_area__slug", lookup_expr="startswith")

     class Meta:
-        fields = (
-            "program",
-            "assistance_through",
-            "assistance_through__startswith",
-            "service_provider__full_name",
-            "service_provider__full_name__startswith",
-            "start_date",
-            "start_date__lte",
-            "start_date__gte",
-            "end_date",
-            "end_date__lte",
-            "end_date__gte",
-            "business_area",
-            "business_area__slug",
-            "business_area__slug__startswith",
-        )
+        fields = {
+            "program": ["exact"],
+            "assistance_through": ["exact", "startswith"],
+            "service_provider__full_name": ["exact", "startswith"],
+            "start_date": ["exact", "lte", "gte"],
+            "end_date": ["exact", "lte", "gte"],
+            "business_area": ["exact"],
+        }
         model = CashPlan

         order_by = OrderingFilter(
diff --git a/backend/hct_mis_api/apps/registration_data/schema.py b/backend/hct_mis_api/apps/registration_data/schema.py
index cfca7abd9a..1d4289d39f 100644
--- a/backend/hct_mis_api/apps/registration_data/schema.py
+++ b/backend/hct_mis_api/apps/registration_data/schema.py
@@ -31,8 +31,10 @@ class Meta:
         model = RegistrationDataImport
         fields = {
             "imported_by__id": ["exact"],
+            "import_date": ["exact"],
             "status": ["exact"],
             "name": ["exact", "startswith"],
+            "business_area": ["exact"],
         }

     order_by = CustomOrderingFilter(
diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py
index 79b5d8863a..ef8649f374 100644
--- a/backend/hct_mis_api/apps/sanction_list/schema.py
+++ b/backend/hct_mis_api/apps/sanction_list/schema.py
@@ -19,10 +19,13 @@


 class SanctionListIndividualFilter(FilterSet):
-    full_name__startswith = CharFilter(field_name="full_name", lookup_expr="startswith")

     class Meta:
-        fields = ("id", "full_name", "full_name__startswith", "reference_number")
+        fields = {
+            "id": ["exact"],
+            "full_name": ["exact", "startswith"],
+            "reference_number": ["exact"],
+        }
         model = SanctionListIndividual

         order_by = CustomOrderingFilter(
diff --git a/backend/poetry.lock b/backend/poetry.lock
index 8e0657eeb2..31dab9ed22 100644
--- a/backend/poetry.lock
+++ b/backend/poetry.lock
@@ -303,17 +303,6 @@ category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

-[[package]]
-name = "coverage"
-version = "6.3.1"
-description = "Code coverage measurement for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-toml = ["tomli"]
-
 [[package]]
 name = "cryptography"
 version = "36.0.1"
@@ -357,14 +346,6 @@ category = "main"
 optional = false
 python-versions = ">=2.7"

-[[package]]
-name = "distlib"
-version = "0.3.4"
-description = "Distribution utilities"
-category = "dev"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "django"
 version = "3.2"
@@ -587,7 +568,7 @@ six = ">=1.2"

 [[package]]
 name = "django-filter"
-version = "2.4.0"
+version = "2.3.0"
description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." category = "main" optional = false @@ -894,18 +875,6 @@ python-versions = "*" [package.dependencies] wasmer = {version = ">=0.3.0", markers = "python_version >= \"3.5\" and platform_machine == \"x86_64\" and sys_platform == \"darwin\" or python_version >= \"3.5\" and platform_machine == \"x86_64\" and sys_platform == \"linux\""} -[[package]] -name = "filelock" -version = "3.5.1" -description = "A platform independent file lock." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] - [[package]] name = "freezegun" version = "0.3.14" @@ -1325,30 +1294,6 @@ category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "platformdirs" -version = "2.5.0" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - [[package]] name = "promise" version = "2.3" @@ -1412,14 +1357,6 @@ python-versions = "*" [package.extras] tests = ["pytest"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pycountry" version = "20.7.3" @@ -1845,28 +1782,6 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "tox" -version = "3.24.5" -description = "tox is a generic virtualenv management and test command line tool" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} -filelock = ">=3.0.0" -packaging = ">=14" -pluggy = ">=0.12.0" -py = ">=1.4.17" -six = ">=1.14.0" -toml = ">=0.9.4" -virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" - -[package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] - [[package]] name = "traitlets" version = "5.1.1" @@ -1923,24 +1838,6 @@ category = "main" optional = false python-versions = ">=3.6" -[[package]] -name = "virtualenv" -version = "20.13.1" -description = "Virtual Python Environment builder" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - 
-[package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -platformdirs = ">=2,<3" -six = ">=1.9.0,<2" - -[package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] - [[package]] name = "waitress" version = "2.0.0" @@ -2061,7 +1958,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.1" -content-hash = "5a4378473fbad779a44ad17fe287af8fb542584c569ffeefd4894e2ce2366207" +content-hash = "40ff4fbf156dc9e8ac0a64bc9f4ef93680c2d8965a011e46539f40b9de5eca7c" [metadata.files] amqp = [ @@ -2207,49 +2104,6 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] -coverage = [ - {file = "coverage-6.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525"}, - {file = "coverage-6.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e"}, - {file = "coverage-6.3.1-cp310-cp310-win32.whl", hash = "sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217"}, - {file = "coverage-6.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb"}, - {file = "coverage-6.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1"}, - {file = 
"coverage-6.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8"}, - {file = "coverage-6.3.1-cp37-cp37m-win32.whl", hash = "sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0"}, - {file = "coverage-6.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a"}, - {file = "coverage-6.3.1-cp38-cp38-win32.whl", hash = "sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10"}, - {file = "coverage-6.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38"}, - {file = "coverage-6.3.1-cp39-cp39-win32.whl", hash = "sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2"}, - {file = "coverage-6.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa"}, - {file = "coverage-6.3.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2"}, - {file = "coverage-6.3.1.tar.gz", hash = "sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8"}, -] cryptography = [ {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, @@ -2284,10 +2138,6 @@ diff-match-patch = [ {file = "diff-match-patch-20200713.tar.gz", hash = "sha256:da6f5a01aa586df23dfc89f3827e1cafbb5420be9d87769eeb079ddfd9477a18"}, {file = "diff_match_patch-20200713-py3-none-any.whl", hash = "sha256:8bf9d9c4e059d917b5c6312bac0c137971a32815ddbda9c682b949f2986b4d34"}, ] -distlib = [ - {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, - {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, -] django = [ {file = "Django-3.2-py3-none-any.whl", hash = "sha256:0604e84c4fb698a5e53e5857b5aea945b2f19a18f25f10b8748dbdf935788927"}, {file = "Django-3.2.tar.gz", hash = "sha256:21f0f9643722675976004eb683c55d33c05486f94506672df3d6a141546f389d"}, @@ -2351,8 +2201,8 @@ django-extensions = [ {file = "django_extensions-2.2.6-py2.py3-none-any.whl", hash = "sha256:4524eca892d23fa6e93b0620901983b287ff5dc806f1b978d6a98541f06b9471"}, ] django-filter = [ - {file = "django-filter-2.4.0.tar.gz", hash = "sha256:84e9d5bb93f237e451db814ed422a3a625751cbc9968b484ecc74964a8696b06"}, - {file = "django_filter-2.4.0-py3-none-any.whl", hash = "sha256:e00d32cebdb3d54273c48f4f878f898dced8d5dfaad009438fe61ebdf535ace1"}, + {file = "django-filter-2.3.0.tar.gz", hash = "sha256:11e63dd759835d9ba7a763926ffb2662cf8a6dcb4c7971a95064de34dbc7e5af"}, + {file = "django_filter-2.3.0-py3-none-any.whl", hash = "sha256:616848eab6fc50193a1b3730140c49b60c57a3eda1f7fc57fa8505ac156c6c75"}, ] django-hijack = [ {file = "django-hijack-3.1.4.tar.gz", hash = "sha256:785940c2e693401d8302fff4ced2d8cf0beb69a88b7f944539b035ab11b1b6d3"}, @@ -2444,10 +2294,6 @@ faker = [ fastdiff = [ {file = "fastdiff-0.2.0.tar.gz", hash = "sha256:623ad3d9055ab78e014d0d10767cb033d98d5d4f66052abf498350c8e42e29aa"}, ] -filelock = [ - {file = "filelock-3.5.1-py3-none-any.whl", hash = "sha256:7b23620a293cf3e19924e469cb96672dc72b36c26e8f80f85668310117fcbe4e"}, - {file = "filelock-3.5.1.tar.gz", hash = "sha256:d1eccb164ed020bc84edd9e45bf6cdb177f64749f6b8fe066648832d2e98726d"}, -] freezegun = [ {file = "freezegun-0.3.14-py2.py3-none-any.whl", hash = "sha256:10336fc80a235847c64033f9727f3847f37db4bd549be1d9f3b5ae0279256c69"}, {file = "freezegun-0.3.14.tar.gz", hash = "sha256:6262de2f4bab671f7189bb8a0b9d8751da69a53f0b9813fb8f412681662d872a"}, @@ -2698,14 +2544,6 @@ pillow = [ {file = "Pillow-7.0.0-pp373-pypy36_pp73-win32.whl", hash = "sha256:8453f914f4e5a3d828281a6628cf517832abfa13ff50679a4848926dac7c0358"}, {file = "Pillow-7.0.0.tar.gz", hash = 
"sha256:4d9ed9a64095e031435af120d3c910148067087541131e82b3e8db302f4c8946"}, ] -platformdirs = [ - {file = "platformdirs-2.5.0-py3-none-any.whl", hash = "sha256:30671902352e97b1eafd74ade8e4a694782bd3471685e78c32d0fdfd3aa7e7bb"}, - {file = "platformdirs-2.5.0.tar.gz", hash = "sha256:8ec11dfba28ecc0715eb5fb0147a87b1bf325f349f3da9aab2cd6b50b96b692b"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] promise = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] @@ -2770,10 +2608,6 @@ pure-eval = [ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, ] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] pycountry = [ {file = "pycountry-20.7.3.tar.gz", hash = "sha256:81084a53d3454344c0292deebc20fcd0a1488c136d4900312cbd465cf552cb42"}, ] @@ -3108,10 +2942,6 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tox = [ - {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, - {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, -] traitlets = [ {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, @@ -3158,10 +2988,6 @@ vine = [ {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] -virtualenv = [ - {file = "virtualenv-20.13.1-py2.py3-none-any.whl", hash = "sha256:45e1d053cad4cd453181ae877c4ffc053546ae99e7dd049b9ff1d9be7491abf7"}, - {file = "virtualenv-20.13.1.tar.gz", hash = "sha256:e0621bcbf4160e4e1030f05065c8834b4e93f4fcc223255db2a823440aca9c14"}, -] waitress = [ {file = "waitress-2.0.0-py3-none-any.whl", hash = "sha256:29af5a53e9fb4e158f525367678b50053808ca6c21ba585754c77d790008c746"}, {file = "waitress-2.0.0.tar.gz", hash = "sha256:69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 0c5e118804..eb4434e12f 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -75,7 +75,7 @@ django-countries = "^7.0" django-elasticsearch-dsl = "7.1.4" django-environ = "*" django-extensions = "2.2.6" -django-filter = "^2.2.0" +django-filter = "2.2.0" django-hijack = "*" django-jsoneditor = "*" django-model-utils = "4.0.0" @@ -131,8 +131,6 @@ watchdog = "^2.0.2" wasmer = "0.4.1" parso = "0.7.1" argh = "^0.26.2" -tox = "^3.24.5" -coverage = "^6.3.1" [build-system] 
requires = ["poetry-core>=1.0.0"] From 9f09dd52f60438bfb7e3bda0e2e8daae4ec2eb49 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 18 Feb 2022 10:52:27 +0000 Subject: [PATCH 21/24] Downgrade to forked django-filter --- backend/poetry.lock | 135 +++++++++++++++++------------------------ backend/pyproject.toml | 1 + 2 files changed, 56 insertions(+), 80 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 31dab9ed22..e177f93c80 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -568,14 +568,21 @@ six = ">=1.2" [[package]] name = "django-filter" -version = "2.3.0" -description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +version = "2.2.1" +description = "" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.4" +develop = false [package.dependencies] -Django = ">=2.2" +Django = ">=1.11" + +[package.source] +type = "git" +url = "https://github.com/srugano/django-filter.git" +reference = "d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" +resolved_reference = "d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" [[package]] name = "django-hijack" @@ -1146,11 +1153,11 @@ python-versions = "*" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.0" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "matplotlib-inline" @@ -1958,7 +1965,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.1" -content-hash = "40ff4fbf156dc9e8ac0a64bc9f4ef93680c2d8965a011e46539f40b9de5eca7c" +content-hash = "88f35794defa38d8fb4a68e92d324f0ae9903d7af4ca718c61124335aeb64ecf" [metadata.files] amqp = [ @@ -2200,10 +2207,7 @@ django-extensions = [ {file = "django-extensions-2.2.6.tar.gz", hash = "sha256:936e8e3962024d3c75ea54f4e0248002404ca7ca7fb698430e60b06b5555b4e7"}, {file = "django_extensions-2.2.6-py2.py3-none-any.whl", hash = "sha256:4524eca892d23fa6e93b0620901983b287ff5dc806f1b978d6a98541f06b9471"}, ] -django-filter = [ - {file = "django-filter-2.3.0.tar.gz", hash = "sha256:11e63dd759835d9ba7a763926ffb2662cf8a6dcb4c7971a95064de34dbc7e5af"}, - {file = "django_filter-2.3.0-py3-none-any.whl", hash = "sha256:616848eab6fc50193a1b3730140c49b60c57a3eda1f7fc57fa8505ac156c6c75"}, -] +django-filter = [] django-hijack = [ {file = "django-hijack-3.1.4.tar.gz", hash = "sha256:785940c2e693401d8302fff4ced2d8cf0beb69a88b7f944539b035ab11b1b6d3"}, {file = "django_hijack-3.1.4-py3-none-any.whl", hash = "sha256:93944e6b940d66642fd154e77d960c0f1022eae3e6ca29f0ebce48790c3e62d0"}, @@ -2399,75 +2403,46 @@ markuppy = [ {file = "MarkupPy-1.14.tar.gz", hash = "sha256:1adee2c0a542af378fe84548ff6f6b0168f3cb7f426b46961038a2bcfaad0d5f"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash 
= "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, + {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, ] matplotlib-inline = [ {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index eb4434e12f..543dbad096 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -76,6 +76,7 @@ django-elasticsearch-dsl = "7.1.4" django-environ = "*" django-extensions = "2.2.6" django-filter = "2.2.0" +django-filter-cps = { git = "https://github.com/srugano/django-filter.git", rev="d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" } django-hijack = "*" django-jsoneditor = "*" django-model-utils = "4.0.0" From c84dd08d3807d80b6daccddd5e2710147b7d9ca6 Mon Sep 17 00:00:00 2001 From: Allan Stockman Rugano <kuristockyrugano@gmail.com> Date: Fri, 18 Feb 2022 14:39:01 +0000 Subject: [PATCH 22/24] Updated version of filter --- backend/poetry.lock | 122 +++++++++++++++++++++++++---------------- backend/pyproject.toml | 5 +- 2 files changed, 79 insertions(+), 48 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index e177f93c80..00b1efe9be 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -444,6 +444,7 @@ python-dateutil = "2.6.0" [package.source] type = "url" url = 
"https://github.com/Tivix/django-auditlog/archive/refs/heads/master.zip" + [[package]] name = "django-celery-beat" version = "2.2.1" @@ -568,7 +569,7 @@ six = ">=1.2" [[package]] name = "django-filter" -version = "2.2.1" +version = "2.2.0" description = "" category = "main" optional = false @@ -581,8 +582,8 @@ Django = ">=1.11" [package.source] type = "git" url = "https://github.com/srugano/django-filter.git" -reference = "d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" -resolved_reference = "d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" +reference = "b01c91990550ccf543cf23110f6750016ca38ec6" +resolved_reference = "b01c91990550ccf543cf23110f6750016ca38ec6" [[package]] name = "django-hijack" @@ -1153,11 +1154,11 @@ python-versions = "*" [[package]] name = "markupsafe" -version = "2.1.0" +version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" [[package]] name = "matplotlib-inline" @@ -1965,7 +1966,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.1" -content-hash = "88f35794defa38d8fb4a68e92d324f0ae9903d7af4ca718c61124335aeb64ecf" +content-hash = "2a3cbb1061f37a1751e656ec65fd22a31d4586d15a5a73b8807738ebc3b29b33" [metadata.files] amqp = [ @@ -2403,46 +2404,75 @@ markuppy = [ {file = "MarkupPy-1.14.tar.gz", hash = "sha256:1adee2c0a542af378fe84548ff6f6b0168f3cb7f426b46961038a2bcfaad0d5f"}, ] markupsafe = [ - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, - {file = 
"MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, - {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] matplotlib-inline = [ {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 543dbad096..b71814d634 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -75,8 +75,8 @@ django-countries = "^7.0" django-elasticsearch-dsl = "7.1.4" django-environ = "*" django-extensions = "2.2.6" -django-filter = "2.2.0" -django-filter-cps = { git = "https://github.com/srugano/django-filter.git", rev="d6d3641abd98caa04340c3c92d6bdd64cdbf52ed" } +#django-filter = "2.2.0" +django-filter = { git = "https://github.com/srugano/django-filter.git", rev="b01c91990550ccf543cf23110f6750016ca38ec6" } django-hijack = "*" django-jsoneditor = "*" django-model-utils = "4.0.0" @@ -116,6 +116,7 @@ urllib3 = "1.26.2" xlrd = "1.2.0" argh = "^0.26.2" django-import-export = "^2.7.0" +MarkupSafe = "2.0.1" [tool.poetry.dev-dependencies] django-webtest = "^1.9.7" From 782a46944dee7416f799e739063c6dd043dbc5e7 Mon Sep 17 00:00:00 2001 From: Patryk Dabrowski <patryk.dabrowski@tivix.com> Date: Mon, 14 Mar 2022 17:40:49 +0100 Subject: [PATCH 23/24] Update Django, PyJWT, social-auth-app-django, social-auth-core --- backend/poetry.lock | 66 ++++++++++++++++++++---------------------- backend/pyproject.toml | 8 ++--- 2 files changed, 35 insertions(+), 39 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 00b1efe9be..8e29cd2545 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -348,7 +348,7 @@ python-versions = ">=2.7" [[package]] name = "django" -version = "3.2" +version = "3.2.12" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." category = "main" optional = false @@ -725,7 +725,7 @@ full = ["django-adminfilters (>=1.7.1)", "django-admin-extra-urls (>=3.5.1)", "d [[package]] name = "django-sql-explorer" -version = "2.4" +version = "2.4.1" description = "A pluggable app that allows users (admins) to execute SQL, view, and export the results." 
category = "main" optional = false @@ -1391,16 +1391,17 @@ python-versions = ">=3.5" [[package]] name = "pyjwt" -version = "1.7.1" +version = "2.3.0" description = "JSON Web Token implementation in Python" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.extras] -crypto = ["cryptography (>=1.4)"] -flake8 = ["flake8", "flake8-import-order", "pep8-naming"] -test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"] +crypto = ["cryptography (>=3.3.1)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyparsing" @@ -1664,41 +1665,38 @@ test = ["pytest (>=4.6)", "pytest-cov", "nose", "django (>=1.10.6)"] [[package]] name = "social-auth-app-django" -version = "3.4.0" +version = "5.0.0" description = "Python Social Authentication, Django integration." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -six = "*" -social-auth-core = ">=3.3.0" +social-auth-core = ">=4.1.0" [[package]] name = "social-auth-core" -version = "3.4.0" +version = "4.2.0" description = "Python social authentication made simple." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] cryptography = ">=1.4" -defusedxml = {version = ">=0.5.0rc1", markers = "python_version >= \"3.0\""} +defusedxml = ">=0.5.0rc1" oauthlib = ">=1.0.3" -PyJWT = ">=1.4.0" -python3-openid = {version = ">=3.0.10", markers = "python_version >= \"3.0\""} +PyJWT = ">=2.0.0" +python3-openid = ">=3.0.10" requests = ">=2.9.1" requests-oauthlib = ">=0.6.1" -six = ">=1.10.0" [package.extras] -all = ["python-jose (>=3.0.0)", "pyjwt (>=1.7.1)", "python-saml (>=2.2.0)", "cryptography (>=2.1.1)"] -allpy2 = ["python-jose (>=3.0.0)", "pyjwt (>=1.7.1)", "python-saml (>=2.2.0)", "cryptography (>=2.1.1)", "python-openid (>=2.2.5)"] -allpy3 = ["python-jose (>=3.0.0)", "pyjwt (>=1.7.1)", "python-saml (>=2.2.0)", "cryptography (>=2.1.1)", "defusedxml (>=0.5.0rc1)", "python3-openid (>=3.0.10)"] +all = ["python-jose (>=3.0.0)", "python3-saml (>=1.2.1)", "lxml (<4.7)", "cryptography (>=2.1.1)"] +allpy3 = ["python-jose (>=3.0.0)", "python3-saml (>=1.2.1)", "lxml (<4.7)", "cryptography (>=2.1.1)"] azuread = ["cryptography (>=2.1.1)"] -openidconnect = ["python-jose (>=3.0.0)", "pyjwt (>=1.7.1)"] -saml = ["python-saml (>=2.2.0)"] +openidconnect = ["python-jose (>=3.0.0)"] +saml = ["python3-saml (>=1.2.1)", "lxml (<4.7)"] [[package]] name = "sorl-thumbnail" @@ -1966,7 +1964,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "1.1" python-versions = "3.9.1" -content-hash = "2a3cbb1061f37a1751e656ec65fd22a31d4586d15a5a73b8807738ebc3b29b33" +content-hash = "b85434f0dddb93129a28473c8e04cad5ec870a27b61511835fa60a2ac34f4d3a" [metadata.files] amqp = [ @@ -2147,8 +2145,8 @@ diff-match-patch = [ {file = "diff_match_patch-20200713-py3-none-any.whl", hash = "sha256:8bf9d9c4e059d917b5c6312bac0c137971a32815ddbda9c682b949f2986b4d34"}, ] django = [ - {file = "Django-3.2-py3-none-any.whl", hash = "sha256:0604e84c4fb698a5e53e5857b5aea945b2f19a18f25f10b8748dbdf935788927"}, - {file = "Django-3.2.tar.gz", hash = "sha256:21f0f9643722675976004eb683c55d33c05486f94506672df3d6a141546f389d"}, + {file = 
"Django-3.2.12-py3-none-any.whl", hash = "sha256:9b06c289f9ba3a8abea16c9c9505f25107809fb933676f6c891ded270039d965"}, + {file = "Django-3.2.12.tar.gz", hash = "sha256:9772e6935703e59e993960832d66a614cf0233a1c5123bc6224ecc6ad69e41e2"}, ] django-admin-extra-urls = [ {file = "django-admin-extra-urls-3.5.1.tar.gz", hash = "sha256:fc68efd40569f2301cb329a0c445dd3517b0aa9c03c1ab7d9fc8a006d22de9b8"}, @@ -2253,8 +2251,8 @@ django-smart-admin = [ {file = "django-smart-admin-1.6.0.tar.gz", hash = "sha256:cbd9b0543453cdfac9e3da8fa5cdd9d4ddf9ac4db966d814f9dac04bd4ce1847"}, ] django-sql-explorer = [ - {file = "django-sql-explorer-2.4.tar.gz", hash = "sha256:6f27c8842600f19704ac20bef52d837a25da11c090cd290ee9ff40f79e48da30"}, - {file = "django_sql_explorer-2.4-py3-none-any.whl", hash = "sha256:2b20499a21e0b1faced83eb63f9962dcaea644429092d645583e2f2ea3983cdb"}, + {file = "django-sql-explorer-2.4.1.tar.gz", hash = "sha256:fd3b92b5fdbcb4ecda27454eb70510a44f29176ca43c04dee704503e53b90cf3"}, + {file = "django_sql_explorer-2.4.1-py3-none-any.whl", hash = "sha256:d9a36461c70184fa1f54d038b20d6b08b40bb1709448a827123ecee5cc6dbbab"}, ] django-storages = [ {file = "django-storages-1.8.tar.gz", hash = "sha256:9322ab74ba6371e2e0fccc350c741686ade829e43085597b26b07ae8955a0a00"}, @@ -2625,8 +2623,8 @@ pygments = [ {file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"}, ] pyjwt = [ - {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"}, - {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"}, + {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, + {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, ] pyparsing = [ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, @@ -2907,14 +2905,12 @@ snapshottest = [ {file = "snapshottest-1.0.0a0.tar.gz", hash = "sha256:dfcde50b4bc8a9336a40bfae1127e2f1310f6bbab4394ffcd0cc192f4c929ba0"}, ] social-auth-app-django = [ - {file = "social-auth-app-django-3.4.0.tar.gz", hash = "sha256:09575f5c7dd91465df3a898c58e7c4ae1e78f31edba36b8b7be47ab0aeef2789"}, - {file = "social_auth_app_django-3.4.0-py2-none-any.whl", hash = "sha256:02b561e175d4a93896e4436b591586b61e647bd8eeef14c99a26344eb3b48d0e"}, - {file = "social_auth_app_django-3.4.0-py3-none-any.whl", hash = "sha256:47d1720115a9eaad78a67e99987d556abaa01222b9c2b9538182bbdbb10304ba"}, + {file = "social-auth-app-django-5.0.0.tar.gz", hash = "sha256:b6e3132ce087cdd6e1707aeb1b588be41d318408fcf6395435da0bc6fe9a9795"}, + {file = "social_auth_app_django-5.0.0-py3-none-any.whl", hash = "sha256:52241a25445a010ab1c108bafff21fc5522d5c8cd0d48a92c39c7371824b065d"}, ] social-auth-core = [ - {file = "social-auth-core-3.4.0.tar.gz", hash = "sha256:aaec7f1e1a9bb61d0467d05c8cfe8dd55402f39229716b933e3dc29eb5f1e61a"}, - {file = "social_auth_core-3.4.0-py2-none-any.whl", hash = "sha256:a4b972b6250d7a32940aec2972e33ebc645de91b2153d18dcd3e38fb74271042"}, - {file = "social_auth_core-3.4.0-py3-none-any.whl", hash = "sha256:b3aa96be236e59842ae45a5a51fe75c97814087ab5ba3092e80b41cb3dcdd8af"}, + {file = "social-auth-core-4.2.0.tar.gz", hash = "sha256:af6fc7d9ee39f0aa697cd953121add638fc32a49816341a9838a67904c39e034"}, + {file = "social_auth_core-4.2.0-py3-none-any.whl", hash = 
"sha256:08fe6645b98950cff9d6970b6704345f821d78729df0164bca0002c624b001e1"}, ] sorl-thumbnail = [ {file = "sorl-thumbnail-12.5.0.tar.gz", hash = "sha256:8dfe5fda91a5047d1d35a0b9effe7b000764a01d648e15ca076f44e9c34b6dbd"}, diff --git a/backend/pyproject.toml b/backend/pyproject.toml index b71814d634..565d9079da 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -52,11 +52,11 @@ authors = ["Tivix"] [tool.poetry.dependencies] python = "3.9.1" -Django = "3.2" +Django = "3.2.12" Jinja2 = "2.11.2" #name = "HCT MIS Backend" #version = "2022.1.1" -PyJWT = "1.7.1" +PyJWT = "^2.0.0" PyYAML = "^5.4.1" black = "^20.8b1" celery = {extras = ["redis"], version = "^5.0.5"} @@ -109,8 +109,8 @@ pygments = "2.7.3" pytz = "2020.4" sentry-sdk = "0.19.5" single-source = "^0.1.5" -social-auth-app-django = "^3.1.0" -social-auth-core = "^3.3.3" +social-auth-app-django = "^5.0.0" +social-auth-core = "^4.2.0" sorl-thumbnail = "12.5.0" urllib3 = "1.26.2" xlrd = "1.2.0" From 0c84ad167c7287c60d702d41c398e45f8a7059d2 Mon Sep 17 00:00:00 2001 From: Patryk Dabrowski <patryk.dabrowski@tivix.com> Date: Wed, 16 Mar 2022 10:00:01 +0100 Subject: [PATCH 24/24] Fix black format --- backend/hct_mis_api/apps/core/schema.py | 1 + backend/hct_mis_api/apps/payment/utils.py | 2 +- backend/hct_mis_api/apps/sanction_list/schema.py | 3 +-- backend/hct_mis_api/apps/steficon/forms.py | 1 + backend/hct_mis_api/apps/targeting/steficon.py | 1 + 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/backend/hct_mis_api/apps/core/schema.py b/backend/hct_mis_api/apps/core/schema.py index 10bead2bcd..396578cc4d 100644 --- a/backend/hct_mis_api/apps/core/schema.py +++ b/backend/hct_mis_api/apps/core/schema.py @@ -48,6 +48,7 @@ class Meta: "title": ["exact", "istartswith"], } + class ChoiceObject(graphene.ObjectType): name = String() value = String() diff --git a/backend/hct_mis_api/apps/payment/utils.py b/backend/hct_mis_api/apps/payment/utils.py index 9d16be41e4..bbf72d02ce 100644 --- a/backend/hct_mis_api/apps/payment/utils.py +++ b/backend/hct_mis_api/apps/payment/utils.py @@ -12,7 +12,7 @@ def get_number_of_samples(payment_records_sample_count, confidence_interval, mar variable = 0.5 z_score = NormalDist().inv_cdf(confidence_interval + (1 - confidence_interval) / 2) - theoretical_sample = (z_score**2) * variable * (1 - variable) / margin_of_error**2 + theoretical_sample = (z_score ** 2) * variable * (1 - variable) / margin_of_error ** 2 actual_sample = ceil( (payment_records_sample_count * theoretical_sample / (theoretical_sample + payment_records_sample_count)) * 1.5 ) diff --git a/backend/hct_mis_api/apps/sanction_list/schema.py b/backend/hct_mis_api/apps/sanction_list/schema.py index ef8649f374..4cf6b5a2ee 100644 --- a/backend/hct_mis_api/apps/sanction_list/schema.py +++ b/backend/hct_mis_api/apps/sanction_list/schema.py @@ -19,9 +19,8 @@ class SanctionListIndividualFilter(FilterSet): - class Meta: - fields = fields = { + fields = { "id": ["exact"], "full_name": ["exact", "startswith"], "reference_number": ["exact"], diff --git a/backend/hct_mis_api/apps/steficon/forms.py b/backend/hct_mis_api/apps/steficon/forms.py index 9d2a0b0b8e..daabd216af 100644 --- a/backend/hct_mis_api/apps/steficon/forms.py +++ b/backend/hct_mis_api/apps/steficon/forms.py @@ -28,6 +28,7 @@ def format_code(code): return black.format_file_contents(code, fast=False, mode=mode) + except ImportError as ex: if config.USE_BLACK: logger.warning(f"Steficon is configured to use Black, but was unable to import it: {ex}") diff --git 
a/backend/hct_mis_api/apps/targeting/steficon.py b/backend/hct_mis_api/apps/targeting/steficon.py index b1bfa4c42a..6cfb42ccd9 100644 --- a/backend/hct_mis_api/apps/targeting/steficon.py +++ b/backend/hct_mis_api/apps/targeting/steficon.py @@ -90,6 +90,7 @@ def test_steficon(self, request, pk): context["form"] = form return TemplateResponse(request, "admin/targeting/targetpopulation/steficon_test.html", context) + except ImportError: class SteficonExecutorMixin:
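
The steficon/forms.py and targeting/steficon.py hunks above only add the blank lines Black requires, but both sit inside the same module-level try/except ImportError guard: the optional dependency is imported at module scope and a degraded fallback is defined when the import fails. A minimal sketch of that pattern, using Black as in the forms.py hunk; the 120-character line length and the log message are assumptions for illustration, not values taken from the repository:

import logging

logger = logging.getLogger(__name__)

try:
    import black  # optional formatter, guarded the same way as in the patch

    def format_code(code: str) -> str:
        # Reformat a source string with Black. Raises black.NothingChanged
        # when the input is already formatted, which callers may treat as a no-op.
        mode = black.Mode(line_length=120)  # assumed line length, not from the repo
        return black.format_file_contents(code, fast=False, mode=mode)

except ImportError as ex:
    logger.warning("Black could not be imported, code formatting is disabled: %s", ex)

    def format_code(code: str) -> str:
        # Fallback: return the source unchanged when Black is unavailable.
        return code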